diff --git a/cfg/exp/ablation/feature_threshold/slic100_dino224_16.yaml b/.deprecated/cfg/exp/ablation/feature_threshold/slic100_dino224_16.yaml similarity index 100% rename from cfg/exp/ablation/feature_threshold/slic100_dino224_16.yaml rename to .deprecated/cfg/exp/ablation/feature_threshold/slic100_dino224_16.yaml diff --git a/cfg/exp/ablation/feature_threshold/slic100_efficientnet_b4.yaml b/.deprecated/cfg/exp/ablation/feature_threshold/slic100_efficientnet_b4.yaml similarity index 100% rename from cfg/exp/ablation/feature_threshold/slic100_efficientnet_b4.yaml rename to .deprecated/cfg/exp/ablation/feature_threshold/slic100_efficientnet_b4.yaml diff --git a/cfg/exp/ablation/feature_threshold/slic100_resnet18.yaml b/.deprecated/cfg/exp/ablation/feature_threshold/slic100_resnet18.yaml similarity index 100% rename from cfg/exp/ablation/feature_threshold/slic100_resnet18.yaml rename to .deprecated/cfg/exp/ablation/feature_threshold/slic100_resnet18.yaml diff --git a/cfg/exp/ablation/feature_threshold/slic100_resnet50.yaml b/.deprecated/cfg/exp/ablation/feature_threshold/slic100_resnet50.yaml similarity index 100% rename from cfg/exp/ablation/feature_threshold/slic100_resnet50.yaml rename to .deprecated/cfg/exp/ablation/feature_threshold/slic100_resnet50.yaml diff --git a/cfg/exp/ablation/feature_threshold/slic100_resnet50_dino.yaml b/.deprecated/cfg/exp/ablation/feature_threshold/slic100_resnet50_dino.yaml similarity index 100% rename from cfg/exp/ablation/feature_threshold/slic100_resnet50_dino.yaml rename to .deprecated/cfg/exp/ablation/feature_threshold/slic100_resnet50_dino.yaml diff --git a/cfg/exp/ablation/feature_threshold/slic100_sift.yaml b/.deprecated/cfg/exp/ablation/feature_threshold/slic100_sift.yaml similarity index 100% rename from cfg/exp/ablation/feature_threshold/slic100_sift.yaml rename to .deprecated/cfg/exp/ablation/feature_threshold/slic100_sift.yaml diff --git a/cfg/exp/ablation/loss_function/anomaly_only.yaml b/.deprecated/cfg/exp/ablation/loss_function/anomaly_only.yaml similarity index 100% rename from cfg/exp/ablation/loss_function/anomaly_only.yaml rename to .deprecated/cfg/exp/ablation/loss_function/anomaly_only.yaml diff --git a/cfg/exp/ablation/loss_function/mse_and_anomaly.yaml b/.deprecated/cfg/exp/ablation/loss_function/mse_and_anomaly.yaml similarity index 100% rename from cfg/exp/ablation/loss_function/mse_and_anomaly.yaml rename to .deprecated/cfg/exp/ablation/loss_function/mse_and_anomaly.yaml diff --git a/cfg/exp/ablation/loss_function/mse_and_anomaly_threshold.yaml b/.deprecated/cfg/exp/ablation/loss_function/mse_and_anomaly_threshold.yaml similarity index 100% rename from cfg/exp/ablation/loss_function/mse_and_anomaly_threshold.yaml rename to .deprecated/cfg/exp/ablation/loss_function/mse_and_anomaly_threshold.yaml diff --git a/cfg/exp/ablation/loss_function/mse_only.yaml b/.deprecated/cfg/exp/ablation/loss_function/mse_only.yaml similarity index 100% rename from cfg/exp/ablation/loss_function/mse_only.yaml rename to .deprecated/cfg/exp/ablation/loss_function/mse_only.yaml diff --git a/cfg/exp/ablation/scene_adaptation/default.yaml b/.deprecated/cfg/exp/ablation/scene_adaptation/default.yaml similarity index 100% rename from cfg/exp/ablation/scene_adaptation/default.yaml rename to .deprecated/cfg/exp/ablation/scene_adaptation/default.yaml diff --git a/.deprecated/dataset/__init__.py b/.deprecated/dataset/__init__.py new file mode 100644 index 00000000..7e048bc8 --- /dev/null +++ b/.deprecated/dataset/__init__.py @@ -0,0 +1 @@ +from 
.graph_trav_dataset import get_ablation_module diff --git a/wild_visual_navigation/learning/dataset/graph_trav_dataset.py b/.deprecated/dataset/graph_trav_dataset.py similarity index 85% rename from wild_visual_navigation/learning/dataset/graph_trav_dataset.py rename to .deprecated/dataset/graph_trav_dataset.py index efc49f80..54348a78 100644 --- a/wild_visual_navigation/learning/dataset/graph_trav_dataset.py +++ b/.deprecated/dataset/graph_trav_dataset.py @@ -1,14 +1,13 @@ -from torch_geometric.data import InMemoryDataset, DataListLoader, DataLoader -from torch_geometric.data import LightningDataset +# TODO +# from torch_geometric.data import InMemoryDataset, DataLoader +# from torch_geometric.data import LightningDataset +# from torch_geometric.data import Dataset from wild_visual_navigation import WVN_ROOT_DIR import os import torch -from pathlib import Path -from torch_geometric.data import Dataset from torchvision import transforms as T from typing import Optional, Callable -from torch_geometric.data import Data import random @@ -46,7 +45,6 @@ def __init__( print("Not found path", img_p) if training_data_percentage < 100: - if int(len(ls) * training_data_percentage / 100) == 0: raise Exception("Defined Training Data Perentage to small !") @@ -187,13 +185,28 @@ def get_test_dataset(perugia_root, env, feature_key, test_all_datasets, training ) ] else: - val_dataset = get_test_dataset(perugia_root, env, feature_key, test_all_datasets, training_data_percentage) + val_dataset = get_test_dataset( + perugia_root, + env, + feature_key, + test_all_datasets, + training_data_percentage, + ) train_loader = DataLoader( - dataset=train_dataset, batch_size=batch_size, num_workers=num_workers, pin_memory=False + dataset=train_dataset, + batch_size=batch_size, + num_workers=num_workers, + pin_memory=False, ) val_loader = [ - DataLoader(dataset=v, batch_size=batch_size, num_workers=num_workers, pin_memory=False) for v in val_dataset + DataLoader( + dataset=v, + batch_size=batch_size, + num_workers=num_workers, + pin_memory=False, + ) + for v in val_dataset ] else: @@ -213,9 +226,20 @@ def get_test_dataset(perugia_root, env, feature_key, test_all_datasets, training ) ] else: - test_dataset = get_test_dataset(perugia_root, env, feature_key, test_all_datasets, training_data_percentage) + test_dataset = get_test_dataset( + perugia_root, + env, + feature_key, + test_all_datasets, + training_data_percentage, + ) test_loader = [ - DataLoader(dataset=t, batch_size=batch_size, num_workers=num_workers, pin_memory=False) + DataLoader( + dataset=t, + batch_size=batch_size, + num_workers=num_workers, + pin_memory=False, + ) for t in test_dataset ] else: diff --git a/wild_visual_navigation/learning/general/__init__.py b/.deprecated/general/__init__.py similarity index 100% rename from wild_visual_navigation/learning/general/__init__.py rename to .deprecated/general/__init__.py diff --git a/.deprecated/general/training_routine.py b/.deprecated/general/training_routine.py new file mode 100644 index 00000000..b2023080 --- /dev/null +++ b/.deprecated/general/training_routine.py @@ -0,0 +1,120 @@ +import os +import warnings +from os.path import join +from omegaconf import read_write + +warnings.filterwarnings("ignore", ".*does not have many workers.*") + +# Frameworks +import torch +from pytorch_lightning import seed_everything, Trainer +from pytorch_lightning.callbacks import EarlyStopping +from pytorch_lightning.callbacks import ModelCheckpoint +from pytorch_lightning.callbacks import LearningRateMonitor +from 
pytorch_lightning.profiler import AdvancedProfiler + +# Custom Modules +from wild_visual_navigation.utils import get_logger +from wild_visual_navigation.lightning import LightningTrav +from wild_visual_navigation.utils import create_experiment_folder, save_omega_cfg +from wild_visual_navigation.dataset import get_ablation_module +from wild_visual_navigation.cfg import ExperimentParams + +__all__ = ["training_routine"] + + +def training_routine(exp: ExperimentParams, seed=42) -> torch.Tensor: + exp + seed_everything(seed) + + if exp.general.log_to_disk: + model_path = create_experiment_folder(exp) + else: + model_path = exp.general.model_path + + with read_write(exp): + # Update model paths + exp.general.model_path = model_path + exp.general.name = os.path.relpath(model_path, exp.env.results) + exp.trainer.default_root_dir = model_path + exp.visu.learning_visu.p_visu = join(model_path, "visu") + + logger = get_logger(exp) + + # Set gpus + exp.trainer.gpus = 1 if torch.cuda.is_available() else None + + # Profiler + if exp.trainer.get("profiler", False) == "advanced": + exp.trainer.profiler = AdvancedProfiler(dirpath=model_path, filename="profile.txt") + + # Callbacks + cb_ls = [] + if logger is not None: + cb_ls.append(LearningRateMonitor(**exp.lr_monitor)) + + if exp.cb_early_stopping.active: + early_stop_callback = EarlyStopping(**exp.cb_early_stopping.cfg) + cb_ls.append(early_stop_callback) + + if exp.cb_checkpoint.active: + checkpoint_callback = ModelCheckpoint( + dirpath=model_path, + save_top_k=1, + monitor="epoch", + mode="max", + save_last=True, + ) + cb_ls.append(checkpoint_callback) + + train_dl, val_dl, test_dl = get_ablation_module( + **exp.ablation_data_module, + perugia_root=exp.general.perugia_root, + get_train_val_dataset=not exp.general.skip_train, + get_test_dataset=not exp.ablation_data_module.val_equals_test, + ) + + # Set correct input feature dimension + if train_dl is not None: + sample = train_dl.dataset[0] + else: + sample = test_dl[0].dataset[0] + input_feature_dimension = sample.x.shape[1] + + with read_write(exp): + exp.model.simple_mlp_cfg.input_size = input_feature_dimension + exp.model.simple_gcn_cfg.input_size = input_feature_dimension + exp.model.double_mlp_cfg.input_size = input_feature_dimension + exp.model.linear_rnvp_cfg.input_size = input_feature_dimension + + if exp.general.log_to_disk: + save_omega_cfg(exp, os.path.join(model_path, "experiment_params.yaml")) + + # Model + model = LightningTrav(exp=exp) + if type(exp.model.load_ckpt) == str: + ckpt = torch.load(exp.model.load_ckpt) + try: + res = model.load_state_dict(ckpt.state_dict, strict=False) + except Exception: + res = model.load_state_dict(ckpt, strict=False) + print("Loaded model checkpoint:", res) trainer = Trainer(**exp.trainer, callbacks=cb_ls, logger=logger) + + if not exp.general.skip_train: + trainer.fit(model=model, train_dataloaders=train_dl, val_dataloaders=val_dl) + + if exp.ablation_data_module.val_equals_test: + return model.accumulated_val_results, model + + # TODO Verify that this makes sense here + test_envs = [] + for j, dl in enumerate(test_dl): + if exp.loss.w_trav == 0: + model._traversability_loss._anomaly_threshold = None + + model.nr_test_run = j + res = trainer.test(model=model, dataloaders=dl)[0] + test_envs.append(dl.dataset.env) + + return {k: v for k, v in zip(test_envs, model.accumulated_test_results)}, model diff --git a/wild_visual_navigation/learning/lightning/__init__.py b/.deprecated/lightning/__init__.py similarity index 100% rename from 
wild_visual_navigation/learning/lightning/__init__.py rename to .deprecated/lightning/__init__.py diff --git a/wild_visual_navigation/learning/lightning/lightning_module.py b/.deprecated/lightning/lightning_module.py similarity index 80% rename from wild_visual_navigation/learning/lightning/lightning_module.py rename to .deprecated/lightning/lightning_module.py index 69e09fce..5e49bea8 100644 --- a/wild_visual_navigation/learning/lightning/lightning_module.py +++ b/.deprecated/lightning/lightning_module.py @@ -1,27 +1,25 @@ -from wild_visual_navigation.learning.model import get_model +from wild_visual_navigation.model import get_model import pytorch_lightning as pl import torch import torch.nn.functional as F -from os.path import join from wild_visual_navigation.visu import LearningVisualizer from pytorch_lightning.utilities.types import EPOCH_OUTPUT -from torch_geometric.data import Data + +# from torch_geometric.data import Data from torchmetrics import ROC -from wild_visual_navigation.learning.utils import TraversabilityLoss, MetricLogger +from wild_visual_navigation.utils import TraversabilityLoss, MetricLogger import os -import pickle class LightningTrav(pl.LightningModule): - def __init__(self, exp: dict, env: dict, log: bool = False): + def __init__(self, exp: dict, log: bool = False): super().__init__() self._model = get_model(exp["model"]) self._visu_count = {"val": 0, "test": 0, "train": 0} self._visualizer = LearningVisualizer(**exp["visu"]["learning_visu"], pl_model=self) self._exp = exp - self._env = env self._mode = "train" self._log = log @@ -71,18 +69,30 @@ def training_step(self, batch: any, batch_idx: int) -> torch.Tensor: for k, v in loss_aux.items(): if k.find("loss") != -1: - self.log(f"{self._mode}_{k}", v.item(), on_epoch=True, prog_bar=True, batch_size=BS) - self.log(f"{self._mode}_loss", loss.item(), on_epoch=True, prog_bar=True, batch_size=BS) + self.log( + f"{self._mode}_{k}", + v.item(), + on_epoch=True, + prog_bar=True, + batch_size=BS, + ) + self.log( + f"{self._mode}_loss", + loss.item(), + on_epoch=True, + prog_bar=True, + batch_size=BS, + ) self.visu(graph, res_updated, loss_aux["confidence"]) # This mask should contain all the segments corrosponding to trees. mask_anomaly = loss_aux["confidence"] < 0.5 - mask_proprioceptive = graph.y == 1 + mask_supervision = graph.y == 1 # Remove the segments that are for sure not an anomalies given that we have walked on them. 
- mask_anomaly[mask_proprioceptive] = False + mask_anomaly[mask_supervision] = False # Elements are valid if they are either an anomaly or we have walked on them to fit the ROC - mask_valid = mask_anomaly | mask_proprioceptive + mask_valid = mask_anomaly | mask_supervision self._auxiliary_training_roc(res_updated[mask_valid, 0], graph.y[mask_valid].type(torch.long)) return loss @@ -113,9 +123,21 @@ def validation_step(self, batch: any, batch_idx: int, dataloader_id: int = 0) -> for k, v in loss_aux.items(): if k.find("loss") != -1: - self.log(f"{self._mode}_{k}", v.item(), on_epoch=True, prog_bar=True, batch_size=BS) + self.log( + f"{self._mode}_{k}", + v.item(), + on_epoch=True, + prog_bar=True, + batch_size=BS, + ) - self.log(f"{self._mode}_loss", loss.item(), on_epoch=True, prog_bar=True, batch_size=BS) + self.log( + f"{self._mode}_loss", + loss.item(), + on_epoch=True, + prog_bar=True, + batch_size=BS, + ) self.visu(graph, res_updated, loss_aux["confidence"]) return loss @@ -134,7 +156,7 @@ def update_threshold(self): fpr, tpr, thresholds = self._auxiliary_training_roc.compute() index = torch.where(fpr > 0.15)[0][0] self.threshold[0] = thresholds[index] - except: + except Exception: pass else: self.threshold[0] = 0.5 @@ -159,13 +181,25 @@ def test_step(self, batch: any, batch_idx: int, dataloader_id: int = 0) -> torch for k, v in loss_aux.items(): if k.find("loss") != -1: - self.log(f"{self._mode}_{k}", v.item(), on_epoch=True, prog_bar=True, batch_size=BS) - self.log(f"{self._mode}_loss", loss.item(), on_epoch=True, prog_bar=True, batch_size=BS) + self.log( + f"{self._mode}_{k}", + v.item(), + on_epoch=True, + prog_bar=True, + batch_size=BS, + ) + self.log( + f"{self._mode}_loss", + loss.item(), + on_epoch=True, + prog_bar=True, + batch_size=BS, + ) self.visu(graph, res_updated, loss_aux["confidence"]) return loss def test_epoch_end(self, outputs: any, plot=False): - ################ NEW VERSION ################ + # NEW VERSION res = self._metric_logger.get_epoch_results("test") dic2 = { "trainer_logged_metric" + k: v.item() @@ -200,7 +234,7 @@ def visu(self, graph: Data, res: torch.tensor, confidence: torch.tensor): on_epoch=True, batch_size=graph.ptr.shape[0] - 1, ) - except: + except Exception: pass for b in range(graph.ptr.shape[0] - 1): @@ -233,14 +267,22 @@ def visu(self, graph: Data, res: torch.tensor, confidence: torch.tensor): # Visualize Graph with Segmentation t1 = self._visualizer.plot_traversability_graph_on_seg( - pred[:, 0], seg, graph[b], center, img, not_log=True, colorize_invalid_centers=True + pred[:, 0], + seg, + graph[b], + center, + img, + not_log=True, + colorize_invalid_centers=True, ) t2 = self._visualizer.plot_traversability_graph_on_seg( graph[b].y, seg, graph[b], center, img, not_log=True ) t_img = self._visualizer.plot_image(img, not_log=True) self._visualizer.plot_list( - imgs=[t1, t2, t_img], tag=f"C{c}_{self._mode}_GraphTrav", store_folder=f"{self._mode}/graph_trav" + imgs=[t1, t2, t_img], + tag=f"C{c}_{self._mode}_GraphTrav", + store_folder=f"{self._mode}/graph_trav", ) nr_channel_reco = graph[b].x.shape[1] @@ -249,14 +291,27 @@ def visu(self, graph: Data, res: torch.tensor, confidence: torch.tensor): # # Visualize Graph with Segmentation c1 = self._visualizer.plot_traversability_graph_on_seg( - conf, seg, graph[b], center, img, not_log=True, colorize_invalid_centers=True + conf, + seg, + graph[b], + center, + img, + not_log=True, + colorize_invalid_centers=True, ) c2 = self._visualizer.plot_traversability_graph_on_seg( - 
graph[b].y_valid.type(torch.float32), seg, graph[b], center, img, not_log=True + graph[b].y_valid.type(torch.float32), + seg, + graph[b], + center, + img, + not_log=True, ) c_img = self._visualizer.plot_image(img, not_log=True) self._visualizer.plot_list( - imgs=[c1, c2, c_img], tag=f"C{c}_{self._mode}_GraphConf", store_folder=f"{self._mode}/graph_conf" + imgs=[c1, c2, c_img], + tag=f"C{c}_{self._mode}_GraphConf", + store_folder=f"{self._mode}/graph_conf", ) # if self._mode == "test": @@ -317,10 +372,18 @@ def visu(self, graph: Data, res: torch.tensor, confidence: torch.tensor): reco_loss = F.mse_loss(pred[:, -nr_channel_reco:], graph[b].x, reduction="none").mean(dim=1) self._visualizer.plot_histogram( - reco_loss, graph[b].y, mean, std, tag=f"C{c}_{self._mode}__confidence_generator_prop" + reco_loss, + graph[b].y, + mean, + std, + tag=f"C{c}_{self._mode}__confidence_generator_prop", ) if hasattr(graph[b], "y_gt"): self._visualizer.plot_histogram( - reco_loss, graph[b].y_gt, mean, std, tag=f"C{c}_{self._mode}__confidence_generator_gt" + reco_loss, + graph[b].y_gt, + mean, + std, + tag=f"C{c}_{self._mode}__confidence_generator_gt", ) diff --git a/scripts/ablations/sklearn_ablation.py b/.deprecated/scripts/ablations/sklearn_ablation.py similarity index 87% rename from scripts/ablations/sklearn_ablation.py rename to .deprecated/scripts/ablations/sklearn_ablation.py index 68704184..c2d3636e 100644 --- a/scripts/ablations/sklearn_ablation.py +++ b/.deprecated/scripts/ablations/sklearn_ablation.py @@ -1,7 +1,6 @@ +raise ValueError("TODO: Not tested with new configuration!") # Use the same config to load the data using the dataloader -from wild_visual_navigation.learning.dataset import get_ablation_module -from wild_visual_navigation.learning.utils import load_env -from wild_visual_navigation import WVN_ROOT_DIR +from wild_visual_navigation.dataset import get_ablation_module from wild_visual_navigation.cfg import ExperimentParams import torch from torchmetrics import Accuracy, AUROC @@ -10,17 +9,14 @@ patch_sklearn() -from sklearn.neighbors import KNeighborsClassifier from sklearn.svm import SVC from sklearn.ensemble import RandomForestClassifier -from sklearn.neural_network import MLPClassifier import numpy as np import copy import os import pickle from pathlib import Path -env = load_env() number_training_runs = 5 test_all_datasets = False @@ -45,7 +41,7 @@ "training_in_memory": True, } train_dataset, val_dataset, test_datasets = get_ablation_module( - **ablation_data_module, perugia_root=env["perugia_root"] + **ablation_data_module, perugia_root=exp.env.perugia_root ) test_scenes = [a.dataset.env for a in test_datasets] @@ -83,7 +79,6 @@ for model_name, v in models.items(): run_results = {} for run in range(number_training_runs): - test_auroc_gt_image = AUROC("binary") test_auroc_prop_image = AUROC("binary") test_acc_gt_image = Accuracy("binary") @@ -127,8 +122,14 @@ for features_test, y_gt, y_prop, test_scene in zip(features_tests, y_gts, y_props, test_scenes): y_pred = v.predict_proba(features_test) - test_auroc_gt.update(torch.from_numpy(y_pred[:, 1]), torch.from_numpy(y_gt).type(torch.long)) - test_auroc_prop.update(torch.from_numpy(y_pred[:, 1]), torch.from_numpy(y_prop).type(torch.long)) + test_auroc_gt.update( + torch.from_numpy(y_pred[:, 1]), + torch.from_numpy(y_gt).type(torch.long), + ) + test_auroc_prop.update( + torch.from_numpy(y_pred[:, 1]), + torch.from_numpy(y_prop).type(torch.long), + ) res = { "test_auroc_gt_seg": test_auroc_gt.compute().item(), @@ -145,10 +146,11 @@ 
model_results[model_name] = copy.deepcopy(run_results) results_epoch[scene] = copy.deepcopy(model_results) - ws = os.environ["ENV_WORKSTATION_NAME"] + ws = os.environ.get("ENV_WORKSTATION_NAME", "default") # Store epoch output to disk. p = os.path.join( - env["base"], f"ablations/classicial_learning_ablation_{ws}/classicial_learning_ablation_test_results.pkl" + exp.env.results, + f"ablations/classicial_learning_ablation_{ws}/classicial_learning_ablation_test_results.pkl", ) print(results_epoch) Path(p).parent.mkdir(parents=True, exist_ok=True) @@ -156,7 +158,7 @@ try: os.remove(p) except OSError as error: - pass + print(error) with open(p, "wb") as handle: pickle.dump(results_epoch, handle, protocol=pickle.HIGHEST_PROTOCOL) diff --git a/scripts/ablations/stepwise_ablation.py b/.deprecated/scripts/ablations/stepwise_ablation.py similarity index 85% rename from scripts/ablations/stepwise_ablation.py rename to .deprecated/scripts/ablations/stepwise_ablation.py index 2b94f184..cb3e486a 100644 --- a/scripts/ablations/stepwise_ablation.py +++ b/.deprecated/scripts/ablations/stepwise_ablation.py @@ -1,16 +1,14 @@ +raise ValueError("TODO: Not tested with new configuration!") import os import torch from pathlib import Path -import yaml import time import pickle import copy import argparse import shutil -from wild_visual_navigation import WVN_ROOT_DIR -from wild_visual_navigation.learning.utils import load_yaml, load_env from wild_visual_navigation.cfg import ExperimentParams -from wild_visual_navigation.learning.general import training_routine +from wild_visual_navigation.general import training_routine if __name__ == "__main__": """Test how much time and data it takes for a model to convergee on a scene. @@ -18,7 +16,6 @@ After training we run for all model checkpoints the test routine. """ exp = ExperimentParams() - env = load_env() parser = argparse.ArgumentParser() parser.add_argument("--output_key", type=str, default="learning_curve", help="Name of the run.") @@ -29,7 +26,10 @@ parser.add_argument("--test_all_datasets", dest="test_all_datasets", action="store_true", help="") parser.set_defaults(test_all_datasets=False) parser.add_argument( - "--scenes", default="forest,hilly,grassland", type=str, help="List of scenes seperated by comma without spaces." + "--scenes", + default="forest,hilly,grassland", + type=str, + help="List of scenes seperated by comma without spaces.", ) parser.add_argument("--store_model_every_n_steps", type=int, default=100) parser.add_argument("--max_steps", type=int, default=1000) @@ -65,8 +65,8 @@ exp.visu.val = 0 exp.visu.test = 0 exp.verify_params() - ws = os.environ["ENV_WORKSTATION_NAME"] - exp.general.model_path = os.path.join(env["base"], f"ablations/{output_key}_{ws}") + ws = os.environ.get("ENV_WORKSTATION_NAME", "default") + exp.general.model_path = os.path.join(exp.env.results, f"ablations/{output_key}_{ws}") # If check_val_every_n_epoch in the current setting the test dataloader is used for validation. # All results during validation are stored and returned by the training routine. 
@@ -79,7 +79,9 @@ Path(exp.general.model_path).mkdir(parents=True, exist_ok=True) percent = range( - data_start_percentage, data_stop_percentage + data_percentage_increment, data_percentage_increment + data_start_percentage, + data_stop_percentage + data_percentage_increment, + data_percentage_increment, ) with open(os.path.join(exp.general.model_path, "experiment_params.pkl"), "wb") as handle: @@ -106,7 +108,7 @@ try: os.remove(p) except OSError as error: - pass + print(error) with open(p, "wb") as handle: pickle.dump(results_epoch, handle, protocol=pickle.HIGHEST_PROTOCOL) @@ -118,11 +120,18 @@ p_inter = os.path.join(exp.general.model_path, f"{output_key}_steps.pkl") total_paths = [str(s) for s in Path(exp.general.model_path).rglob("*.pt")] print("total_paths", len(total_paths)) - import time st = time.time() for j, p in enumerate(Path(exp.general.model_path).rglob("*.pt")): - print("XXXXXXXXXXXXXXXXXXXXX PROGRESS ", j, " of ", len(total_paths), " in ", time.time() - st, " seconds.") + print( + "XXXXXXXXXXXXXXXXXXXXX PROGRESS ", + j, + " of ", + len(total_paths), + " in ", + time.time() - st, + " seconds.", + ) res = str(p).split("/")[-1].split("_") scene, percentage, run, steps = res[-4], res[-3], res[-2], res[-1] @@ -132,7 +141,14 @@ res, _ = training_routine(exp, seed=run) results_step.append( - {"scene": scene, "percentage": percentage, "run": run, "steps": steps, "results": res, "model_path": str(p)} + { + "scene": scene, + "percentage": percentage, + "run": run, + "steps": steps, + "results": res, + "model_path": str(p), + } ) torch.cuda.empty_cache() @@ -141,7 +157,7 @@ try: os.remove(p_inter.replace(".pkl", f"_inter_{j}.pkl")) except OSError as error: - pass + print(error) with open(p_inter.replace(".pkl", f"_inter_{j}.pkl"), "wb") as handle: pickle.dump(results_step, handle, protocol=pickle.HIGHEST_PROTOCOL) @@ -151,7 +167,7 @@ try: os.remove(p) except OSError as error: - pass + print(error) with open(p, "wb") as handle: pickle.dump(results_step, handle, protocol=pickle.HIGHEST_PROTOCOL) diff --git a/scripts/ablations/training_ablation.py b/.deprecated/scripts/ablations/training_ablation.py similarity index 84% rename from scripts/ablations/training_ablation.py rename to .deprecated/scripts/ablations/training_ablation.py index d554e73c..b12b548d 100644 --- a/scripts/ablations/training_ablation.py +++ b/.deprecated/scripts/ablations/training_ablation.py @@ -1,21 +1,18 @@ +raise ValueError("TODO: Not tested with new configuration!") import os import torch from pathlib import Path -import yaml -import time import pickle import copy from wild_visual_navigation import WVN_ROOT_DIR -from wild_visual_navigation.learning.utils import load_yaml +from wild_visual_navigation.utils import load_yaml from wild_visual_navigation.cfg import ExperimentParams -from wild_visual_navigation.learning.general import training_routine +from wild_visual_navigation.general import training_routine from wild_visual_navigation.utils import override_params import argparse -import logging import shutil from dataclasses import asdict -import copy if __name__ == "__main__": """Test how much time and data it takes for a model to convergee on a scene. 
@@ -29,16 +26,30 @@ """ parser = argparse.ArgumentParser() - parser.add_argument("--ablation_type", type=str, default="network", help="Folder containing the ablation configs.") + parser.add_argument( + "--ablation_type", + type=str, + default="network", + help="Folder containing the ablation configs.", + ) parser.add_argument("--number_training_runs", type=int, default=1, help="Number of run per config.") parser.add_argument( - "--test_all_datasets", dest="test_all_datasets", action="store_true", help="Test on all datasets." + "--test_all_datasets", + dest="test_all_datasets", + action="store_true", + help="Test on all datasets.", ) parser.add_argument( - "--store_final_model", dest="store_final_model", action="store_true", help="store_final_model on all datasets." + "--store_final_model", + dest="store_final_model", + action="store_true", + help="store_final_model on all datasets.", ) parser.add_argument( - "--scenes", default="forest,hilly,grassland", type=str, help="List of scenes seperated by comma without spaces." + "--scenes", + default="forest,hilly,grassland", + type=str, + help="List of scenes seperated by comma without spaces.", ) parser.add_argument("--special_key", type=str, default="", help="Test on all datasets.") parser.set_defaults(test_all_datasets=False) @@ -80,7 +91,7 @@ def get_exp(args, model_path, p, scene, stored_params): number_training_runs = args.number_training_runs folder = args.ablation_type special_key = args.special_key - ws = os.environ["ENV_WORKSTATION_NAME"] + ws = os.environ.get("ENV_WORKSTATION_NAME", "default") model_path = os.path.join(WVN_ROOT_DIR, f"results/ablations/{folder}_ablation_{ws}") shutil.rmtree(model_path, ignore_errors=True) Path(model_path).mkdir(parents=True, exist_ok=True) @@ -122,7 +133,7 @@ def get_exp(args, model_path, p, scene, stored_params): try: os.remove(p) except OSError as error: - pass + print(error) with open(p, "wb") as handle: pickle.dump(results_epoch, handle, protocol=pickle.HIGHEST_PROTOCOL) diff --git a/scripts/dataset_generation/convert_test_images_for_labeling.py b/.deprecated/scripts/dataset_generation/convert_test_images_for_labeling.py similarity index 69% rename from scripts/dataset_generation/convert_test_images_for_labeling.py rename to .deprecated/scripts/dataset_generation/convert_test_images_for_labeling.py index d87ac0d3..2c40f01f 100644 --- a/scripts/dataset_generation/convert_test_images_for_labeling.py +++ b/.deprecated/scripts/dataset_generation/convert_test_images_for_labeling.py @@ -1,5 +1,5 @@ import numpy as np -from wild_visual_navigation.utils import perguia_dataset, ROOT_DIR +from wild_visual_navigation.utils import ROOT_DIR import os @@ -24,4 +24,11 @@ res = torch.load(p) img = Image.fromarray(np.uint8(res.permute(1, 2, 0).cpu().numpy() * 255)) - img.save(os.path.join(ROOT_DIR, "wvn_output/labeling", env, l.split("/")[-1].replace(".pt", ".png"))) + img.save( + os.path.join( + ROOT_DIR, + "wvn_output/labeling", + env, + l.split("/")[-1].replace(".pt", ".png"), + ) + ) diff --git a/scripts/dataset_generation/create_gnn_dataset.py b/.deprecated/scripts/dataset_generation/create_gnn_dataset.py similarity index 98% rename from scripts/dataset_generation/create_gnn_dataset.py rename to .deprecated/scripts/dataset_generation/create_gnn_dataset.py index 5b51f10d..e4d5b7aa 100644 --- a/scripts/dataset_generation/create_gnn_dataset.py +++ b/.deprecated/scripts/dataset_generation/create_gnn_dataset.py @@ -7,8 +7,8 @@ import argparse import cv2 - -from torch_geometric.data import Data +# TODO +# from 
torch_geometric.data import Data if __name__ == "__main__": """Converts a folder with images to a torch_geometric dataformat. diff --git a/scripts/dataset_generation/create_train_val_test_lists.py b/.deprecated/scripts/dataset_generation/create_train_val_test_lists.py similarity index 99% rename from scripts/dataset_generation/create_train_val_test_lists.py rename to .deprecated/scripts/dataset_generation/create_train_val_test_lists.py index cce39ea6..fc93b324 100644 --- a/scripts/dataset_generation/create_train_val_test_lists.py +++ b/.deprecated/scripts/dataset_generation/create_train_val_test_lists.py @@ -17,7 +17,6 @@ scenes[d["env"]] = [] for d in perguia_dataset: - p = [ str(s).replace(ROOT_DIR + "/", "") for s in Path(ROOT_DIR, "wvn_output", d["name"].replace("mission_data/", ""), "image").rglob("*.pt") diff --git a/scripts/dataset_generation/download_bitmaps_from_segments_ai.py b/.deprecated/scripts/dataset_generation/download_bitmaps_from_segments_ai.py similarity index 100% rename from scripts/dataset_generation/download_bitmaps_from_segments_ai.py rename to .deprecated/scripts/dataset_generation/download_bitmaps_from_segments_ai.py diff --git a/scripts/dataset_generation/extract_all.py b/.deprecated/scripts/dataset_generation/extract_all.py similarity index 100% rename from scripts/dataset_generation/extract_all.py rename to .deprecated/scripts/dataset_generation/extract_all.py diff --git a/.deprecated/scripts/dataset_generation/extract_binary_images_and_labels.py b/.deprecated/scripts/dataset_generation/extract_binary_images_and_labels.py new file mode 100644 index 00000000..a7f77d14 --- /dev/null +++ b/.deprecated/scripts/dataset_generation/extract_binary_images_and_labels.py @@ -0,0 +1,220 @@ +import sys +import os +import time +from tqdm import tqdm +import subprocess +import yaml + +from tf_bag import BagTfTransformer +import rospy +import rosparam +from sensor_msgs.msg import CameraInfo +import rosbag + +from postprocessing_tools_ros.merging import merge_bags_single + +# from py_image_proc_cuda import ImageProcCuda +# from cv_bridge import CvBridge + +from wild_visual_navigation import WVN_ROOT_DIR +from wild_visual_navigation.utils import perugia_dataset, ROOT_DIR + +sys.path.append(f"{WVN_ROOT_DIR}/wild_visual_navigation_ros/scripts") +from wild_visual_navigation_node import WvnRosInterface + +sys.path.append(f"{WVN_ROOT_DIR}/wild_visual_navigation_anymal/scripts") +from anymal_msg_converter_node import anymal_msg_callback + +# We need to do the following +# 1. Debayering cam4 -> send via ros and wait for result ("correct params") +# 2. anymal_state_topic -> /wild_visual_navigation_node/robot_state +# 3. 
Feed into wild_visual_navigation_node ("correct params") +# # Iterate rosbags + + +def get_bag_info(rosbag_path: str) -> dict: + # This queries rosbag info using subprocess and get the YAML output to parse the topics + info_dict = yaml.safe_load( + subprocess.Popen(["rosbag", "info", "--yaml", rosbag_path], stdout=subprocess.PIPE).communicate()[0] + ) + return info_dict + + +class BagTfTransformerWrapper: + def __init__(self, bag): + self.tf_listener = BagTfTransformer(bag) + + def waitForTransform(self, parent_frame, child_frame, time, duration): + return self.tf_listener.waitForTransform(parent_frame, child_frame, time) + + def lookupTransform(self, parent_frame, child_frame, time): + try: + return self.tf_listener.lookupTransform(parent_frame, child_frame, time) + except Exception: + return (None, None) + + +def do(n, dry_run): + d = perugia_dataset[n] + + if bool(dry_run): + print(d) + return + + s = os.path.join(ROOT_DIR, d["name"]) + + valid_topics = ["/state_estimator/anymal_state", "/wide_angle_camera_front/img_out"] + + rosbags = [ + "/home/rschmid/RosBags/6/images.bag", + "/home/rschmid/RosBags/6/2023-03-02-11-13-08_anymal-d020-lpc_mission_0.bag", + "/home/rschmid/RosBags/6/2023-03-02-11-13-08_anymal-d020-lpc_mission_1.bag", + ] + + output_bag_wvn = s + "_wvn.bag" + output_bag_tf = s + "_tf.bag" + + if not os.path.exists(output_bag_tf): + total_included_count, total_skipped_count = merge_bags_single( + input_bag=rosbags, + output_bag=output_bag_tf, + topics="/tf /tf_static", + verbose=True, + ) + if not os.path.exists(output_bag_wvn): + total_included_count, total_skipped_count = merge_bags_single( + input_bag=rosbags, + output_bag=output_bag_wvn, + topics=" ".join(valid_topics), + verbose=True, + ) + + # Setup WVN node + rospy.init_node("wild_visual_navigation_node") + + mission = s.split("/")[-1] + + running_store_folder = f"/home/rschmid/RosBags/output/{mission}" + + if os.path.exists(running_store_folder): + print("Folder already exists, but proceeding!") + # return + + rosparam.set_param("wild_visual_navigation_node/mode", "extract_labels") + rosparam.set_param("wild_visual_navigation_node/running_store_folder", running_store_folder) + + # for supervision callback + state_msg_valid = False + # desired_twist_msg_valid = False + + wvn_ros_interface = WvnRosInterface() + print("-" * 80) + + print("start loading tf") + tf_listener = BagTfTransformerWrapper(output_bag_tf) + wvn_ros_interface.setup_rosbag_replay(tf_listener) + print("done loading tf") + + # Höngg new + info_msg = CameraInfo() + info_msg.height = 1080 + info_msg.width = 1440 + info_msg.distortion_model = "equidistant" + info_msg.D = [ + 0.4316922809468283, + 0.09279900476637248, + -0.4010909691803734, + 0.4756163338479413, + ] + info_msg.K = [ + 575.6050407221768, + 0.0, + 745.7312198525915, + 0.0, + 578.564849365178, + 519.5207040671075, + 0.0, + 0.0, + 1.0, + ] + info_msg.P = [ + 575.6050407221768, + 0.0, + 745.7312198525915, + 0.0, + 0.0, + 578.564849365178, + 519.5207040671075, + 0.0, + 0.0, + 0.0, + 1.0, + 0.0, + ] + info_msg.R = [1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0] + + rosbag_info_dict = get_bag_info(output_bag_wvn) + total_msgs = sum([x["messages"] for x in rosbag_info_dict["topics"] if x["topic"] in valid_topics]) + total_time_img = 0 + total_time_state = 0 + n = 0 + + with rosbag.Bag(output_bag_wvn, "r") as bag: + start_time = rospy.Time.from_sec(bag.get_start_time() + d["start"]) + end_time = rospy.Time.from_sec(bag.get_start_time() + d["stop"]) + with tqdm( + total=total_msgs, + desc="Total", + 
colour="green", + position=1, + bar_format="{desc:<13}{percentage:3.0f}%|{bar:20}{r_bar}", + ) as pbar: + for topic, msg, ts in bag.read_messages(topics=None, start_time=start_time, end_time=end_time): + if rospy.is_shutdown(): + return + pbar.update(1) + st = time.time() + if topic == "/state_estimator/anymal_state": + state_msg = anymal_msg_callback(msg, return_msg=True) + state_msg_valid = True + + elif topic == "/wide_angle_camera_front/img_out": + image_msg = msg + # print("Received /wide_angle_camera_front/img_out") + + info_msg.header = msg.header + camera_options = {} + camera_options["name"] = "wide_angle_camera_front" + camera_options["use_for_training"] = True + + info_msg.header = msg.header + try: + wvn_ros_interface.image_callback(image_msg, info_msg, camera_options) + except Exception as e: + print("Bad image_callback", e) + + total_time_img += time.time() - st + # print(f"image time: {total_time_img} , state time: {total_time_state}") + # print("add image") + if state_msg_valid: + try: + wvn_ros_interface.robot_state_callback(state_msg, None) + except Exception as e: + print("Bad robot_state callback ", e) + + state_msg_valid = False + total_time_state += time.time() - st + + print("Finished with converting the dataset") + rospy.signal_shutdown("stop the node") + + +if __name__ == "__main__": + import argparse + + parser = argparse.ArgumentParser() + parser.add_argument("--n", type=int, default=0, help="Store data") + parser.add_argument("--dry_run", type=int, default=0, help="Store data") + args = parser.parse_args() + + do(args.n, args.dry_run) diff --git a/scripts/dataset_generation/extract_features_for_dataset.py b/.deprecated/scripts/dataset_generation/extract_features_for_dataset.py similarity index 94% rename from scripts/dataset_generation/extract_features_for_dataset.py rename to .deprecated/scripts/dataset_generation/extract_features_for_dataset.py index b5c10fa5..617919ea 100644 --- a/scripts/dataset_generation/extract_features_for_dataset.py +++ b/.deprecated/scripts/dataset_generation/extract_features_for_dataset.py @@ -3,7 +3,9 @@ import os from pathlib import Path import torch -from torch_geometric.data import Data + +# TODO +# from torch_geometric.data import Data from wild_visual_navigation.utils import KLTTrackerOpenCV import numpy as np from tqdm import tqdm @@ -62,7 +64,13 @@ # ) fes["slic200_dino112_8"] = FeatureExtractor( - device, "slic", "dino", 112, model_type="vit_small", patch_size=8, slic_num_components=200 + device, + "slic", + "dino", + 112, + model_type="vit_small", + patch_size=8, + slic_num_components=200, ) fes["grid32_dino112_8"] = FeatureExtractor( device, "slic", "dino", 112, model_type="vit_small", patch_size=8, cell_size=32 @@ -87,7 +95,13 @@ form = "{desc:<13}{percentage:3.0f}%|{bar:20}{r_bar}" - with tqdm(total=len(needed_images), desc="Total", colour="green", position=1, bar_format=form) as pbar: + with tqdm( + total=len(needed_images), + desc="Total", + colour="green", + position=1, + bar_format=form, + ) as pbar: for m_nr, mission in enumerate(mission_folders): assert os.path.isdir( os.path.join(mission, "image") @@ -151,7 +165,6 @@ key = image.split("/")[-1][:-3] # remove .pt img = torch.load(image) for name, feature_extractor in fes.items(): - edges, feat, seg, center = feature_extractor.extract(img.clone()[None], return_centers=True) filename = os.path.join(mission, "features", name, "seg", key + ".pt") @@ -209,7 +222,8 @@ from wild_visual_navigation.visu import LearningVisualizer visu = LearningVisualizer( - 
p_visu=os.path.join(WVN_ROOT_DIR, "results/extract_features"), store=True + p_visu=os.path.join(WVN_ROOT_DIR, "results/extract_features"), + store=True, ) visu.plot_sparse_optical_flow( pre_pos, @@ -261,7 +275,12 @@ feature_edges_buffer[name] = edges feature_buffer[name] = feat else: - data = Data(x=feat, edge_index=edges, y=supervision_signal, y_valid=supervision_signal_valid) + data = Data( + x=feat, + edge_index=edges, + y=supervision_signal, + y_valid=supervision_signal_valid, + ) filename = os.path.join(mission, "features", name, "graph", key + ".pt") if store and store_idx: diff --git a/scripts/dataset_generation/extract_images_and_labels.py b/.deprecated/scripts/dataset_generation/extract_images_and_labels.py similarity index 87% rename from scripts/dataset_generation/extract_images_and_labels.py rename to .deprecated/scripts/dataset_generation/extract_images_and_labels.py index 1c2702b2..6090c0c6 100644 --- a/scripts/dataset_generation/extract_images_and_labels.py +++ b/.deprecated/scripts/dataset_generation/extract_images_and_labels.py @@ -12,7 +12,7 @@ from sensor_msgs.msg import Image, CameraInfo import rosbag -from postprocessing_tools_ros.merging import merge_bags_single, merge_bags_all +from postprocessing_tools_ros.merging import merge_bags_single # from py_image_proc_cuda import ImageProcCuda # from cv_bridge import CvBridge @@ -51,7 +51,7 @@ def waitForTransform(self, parent_frame, child_frame, time, duration): def lookupTransform(self, parent_frame, child_frame, time): try: return self.tf_listener.lookupTransform(parent_frame, child_frame, time) - except: + except Exception: return (None, None) @@ -64,7 +64,11 @@ def do(n, dry_run): s = os.path.join(ROOT_DIR, d["name"]) - valid_topics = ["/state_estimator/anymal_state", "/alphasense_driver_ros/cam4", "/log/state/desiredRobotTwist"] + valid_topics = [ + "/state_estimator/anymal_state", + "/alphasense_driver_ros/cam4", + "/log/state/desiredRobotTwist", + ] # Merge rosbags if necessary rosbags = [ @@ -76,7 +80,7 @@ def do(n, dry_run): ] try: rosbags.sort(key=lambda x: int(x.split("/")[-1][-5])) - except: + except Exception: pass output_bag_wvn = s + "_wvn.bag" @@ -86,11 +90,17 @@ def do(n, dry_run): # jetson locomotion robot if not os.path.exists(output_bag_tf): total_included_count, total_skipped_count = merge_bags_single( - input_bag=tf_bags, output_bag=output_bag_tf, topics="/tf /tf_static", verbose=True + input_bag=tf_bags, + output_bag=output_bag_tf, + topics="/tf /tf_static", + verbose=True, ) if not os.path.exists(output_bag_wvn): total_included_count, total_skipped_count = merge_bags_single( - input_bag=rosbags, output_bag=output_bag_wvn, topics=" ".join(valid_topics), verbose=True + input_bag=rosbags, + output_bag=output_bag_wvn, + topics=" ".join(valid_topics), + verbose=True, ) # Setup WVN node @@ -109,7 +119,7 @@ def do(n, dry_run): rosparam.set_param("wild_visual_navigation_node/mode", "extract_labels") rosparam.set_param("wild_visual_navigation_node/extraction_store_folder", extraction_store_folder) - # for proprioceptive callback + # for supervision callback state_msg_valid = False desired_twist_msg_valid = False @@ -126,7 +136,17 @@ def do(n, dry_run): info_msg.height = 540 info_msg.width = 720 info_msg.distortion_model = "plumb_bob" - info_msg.K = [347.548139773951, 0.0, 342.454373227748, 0.0, 347.434712422309, 271.368057185649, 0.0, 0.0, 1.0] + info_msg.K = [ + 347.548139773951, + 0.0, + 342.454373227748, + 0.0, + 347.434712422309, + 271.368057185649, + 0.0, + 0.0, + 1.0, + ] info_msg.P = [ 
347.548139773951, 0.0, @@ -159,7 +179,7 @@ def do(n, dry_run): position=1, bar_format="{desc:<13}{percentage:3.0f}%|{bar:20}{r_bar}", ) as pbar: - for (topic, msg, ts) in bag.read_messages(topics=None, start_time=start_time, end_time=end_time): + for topic, msg, ts in bag.read_messages(topics=None, start_time=start_time, end_time=end_time): pbar.update(1) st = time.time() if topic == "/state_estimator/anymal_state": @@ -177,10 +197,12 @@ def do(n, dry_run): # Change this for service call try: image_msg = rospy.wait_for_message( - "/alphasense_driver_ros/cam4/debayered", Image, timeout=0.01 + "/alphasense_driver_ros/cam4/debayered", + Image, + timeout=0.01, ) suc = True - except Exception as e: + except Exception: suc = False pass if suc: diff --git a/scripts/dataset_generation/validate_extract_images_and_labels.py b/.deprecated/scripts/dataset_generation/validate_extract_images_and_labels.py similarity index 90% rename from scripts/dataset_generation/validate_extract_images_and_labels.py rename to .deprecated/scripts/dataset_generation/validate_extract_images_and_labels.py index 8eb1f358..63f922d7 100644 --- a/scripts/dataset_generation/validate_extract_images_and_labels.py +++ b/.deprecated/scripts/dataset_generation/validate_extract_images_and_labels.py @@ -46,7 +46,14 @@ def get_bag_info(rosbag_path: str) -> dict: p = str(os.path.join(ouput_dir, d["name"].replace("mission_data/", ""), "image", dele)) if os.path.exists(p): os.remove(p) - p = str(os.path.join(ouput_dir, d["name"].replace("mission_data/", ""), "supervision_mask", dele)) + p = str( + os.path.join( + ouput_dir, + d["name"].replace("mission_data/", ""), + "supervision_mask", + dele, + ) + ) if os.path.exists(p): os.remove(p) diff --git a/scripts/dataset_generation/visu.py b/.deprecated/scripts/dataset_generation/visu.py similarity index 100% rename from scripts/dataset_generation/visu.py rename to .deprecated/scripts/dataset_generation/visu.py diff --git a/.deprecated/scripts/train.py b/.deprecated/scripts/train.py new file mode 100644 index 00000000..32d7b16a --- /dev/null +++ b/.deprecated/scripts/train.py @@ -0,0 +1,8 @@ +from wild_visual_navigation.cfg import ExperimentParams +from wild_visual_navigation.general import training_routine + +from omegaconf import OmegaConf + +if __name__ == "__main__": + params = OmegaConf.structured(ExperimentParams) + training_routine(params) diff --git a/.deprecated/scripts/train_optuna.py b/.deprecated/scripts/train_optuna.py new file mode 100644 index 00000000..acd0fb62 --- /dev/null +++ b/.deprecated/scripts/train_optuna.py @@ -0,0 +1,75 @@ +import optuna +from argparse import ArgumentParser +import copy +import os +import neptune.integrations.optuna as optuna_utils +import torch + +from wild_visual_navigation.cfg import ExperimentParams +from wild_visual_navigation.general import training_routine +from wild_visual_navigation.utils import get_neptune_run +from omegaconf import read_write, OmegaConf + + +def objective(trial, params: ExperimentParams): + exp = copy.deepcopy(params) + + with read_write(exp): + # Parameter to sweep + exp.optimizer.lr = trial.suggest_float("lr", 0.0001, 0.01, log=True) + exp.loss.w_trav = trial.suggest_float("w_trav", 0.0, 1.0) + exp.loss.w_temp = trial.suggest_float("w_temp", 0.0, 1.0) + exp.loss.w_reco = trial.suggest_float("w_reco", 0.0, 1.0) + exp.loss.anomaly_balanced = trial.suggest_categorical("anomaly_balanced", [True]) + + res, _ = training_routine(exp) + + torch.cuda.empty_cache() + return list(res.values())[0]["test_auroc_gt_image"] + + +if 
__name__ == "__main__": + parser = ArgumentParser() + + parser.add_argument("--name", type=str, default="sweep_loss_function", help="Name of sweep") + parser.add_argument("--n_trials", type=int, default=100, help="Number Trials") + + args = parser.parse_args() + params = OmegaConf.structured(ExperimentParams) + + with read_write(): + params.general.name = os.path.join(args.name, params.general.name) + + run = get_neptune_run( + neptune_project_name=params.logger.neptune_project_name, + tags=["optuna", args.name], + ) + + neptune_callback = optuna_utils.NeptuneCallback( + run, + plots_update_freq=2, + log_plot_slice=True, + log_plot_contour=True, + ) + + params.logger.name = "skip" + params.trainer.enable_checkpointing = False + params.cb_checkpoint.active = False + params.visu.train = 0 + params.visu.val = 0 + params.visu.test = 0 + params.trainer.check_val_every_n_epoch = 100000 + params.general.store_model_every_n_steps = None + params.ablation_data_module.training_in_memory = True + params.trainer.max_steps = 1000 + params.trainer.max_epochs = None + params.general.log_to_disk = False + params.trainer.progress_bar_refresh_rate = 0 + params.trainer.weights_summary = None + params.trainer.enable_progress_bar = False + + binded_objective = lambda trial: objective(trial=trial, experiment=args.experiment) + study = optuna.create_study(direction=optuna.study.StudyDirection.MAXIMIZE) + study.optimize(binded_objective, n_trials=args.n_trials, callbacks=[neptune_callback]) + + trial = study.best_trial diff --git a/wild_visual_navigation/utils/dataset_info.py b/.deprecated/utils/dataset_info.py similarity index 95% rename from wild_visual_navigation/utils/dataset_info.py rename to .deprecated/utils/dataset_info.py index 2d680f85..4ff388da 100644 --- a/wild_visual_navigation/utils/dataset_info.py +++ b/.deprecated/utils/dataset_info.py @@ -133,7 +133,7 @@ def dataset_play(env="forest", mode="train", nr=0, rate=1.0, ignore_tf=False, ot stop = d["stop"] duration = stop - start bags = os.path.join(ROOT_DIR, d["name"]) + "/*.bag" - comment = d["comment"] + # comment = d["comment"] if bool(ignore_tf): bags += " /tf:=/tf_trash" cmd = f"rosbag play -s {start} -u {duration} -r {rate} {other_args} {bags}" @@ -153,5 +153,10 @@ def dataset_play(env="forest", mode="train", nr=0, rate=1.0, ignore_tf=False, ot args = parser.parse_args() dataset_play( - env=args.env, mode=args.mode, nr=args.nr, rate=args.rate, ignore_tf=args.ignore_tf, other_args=args.other_args + env=args.env, + mode=args.mode, + nr=args.nr, + rate=args.rate, + ignore_tf=args.ignore_tf, + other_args=args.other_args, ) diff --git a/wild_visual_navigation/utils/klt_tracker.py b/.deprecated/utils/klt_tracker.py similarity index 87% rename from wild_visual_navigation/utils/klt_tracker.py rename to .deprecated/utils/klt_tracker.py index 0acef608..4a02f330 100644 --- a/wild_visual_navigation/utils/klt_tracker.py +++ b/.deprecated/utils/klt_tracker.py @@ -1,6 +1,5 @@ import torch import kornia -from wild_visual_navigation.utils import Timer def interp2_torch_batch(v, xq, yq): @@ -48,11 +47,17 @@ class KLTTrackerOpenCV(torch.nn.Module): def __init__(self, device="cpu", window_size=25, levels=15) -> None: super().__init__() self.lk_params = dict( - winSize=(7, 7), maxLevel=2, criteria=(cv.TERM_CRITERIA_EPS | cv.TERM_CRITERIA_COUNT, 10, 0.03) + winSize=(7, 7), + maxLevel=2, + criteria=(cv.TERM_CRITERIA_EPS | cv.TERM_CRITERIA_COUNT, 10, 0.03), ) def forward( - self, t_startXs: torch.Tensor, t_startYs: torch.Tensor, img_prev: torch.Tensor, img_next: 
torch.Tensor + self, + t_startXs: torch.Tensor, + t_startYs: torch.Tensor, + img_prev: torch.Tensor, + img_next: torch.Tensor, ) -> torch.Tensor: """_summary_ @@ -82,11 +87,16 @@ def __init__(self, device="cpu", window_size=25, levels=15) -> None: self.window_size = torch.tensor(window_size, device=device) self.levels = levels self.t_mesh_x, self.t_mesh_y = torch.meshgrid( - torch.arange(self.window_size, device=self.device), torch.arange(self.window_size, device=self.device) + torch.arange(self.window_size, device=self.device), + torch.arange(self.window_size, device=self.device), ) def forward( - self, t_startXs: torch.Tensor, t_startYs: torch.Tensor, img_prev: torch.Tensor, img_next: torch.Tensor + self, + t_startXs: torch.Tensor, + t_startYs: torch.Tensor, + img_prev: torch.Tensor, + img_next: torch.Tensor, ) -> torch.Tensor: """_summary_ @@ -113,7 +123,12 @@ def forward( t_newYs = torch.full(t_startYs_flat.shape, -1, dtype=torch.float32, device=self.device) t_newXs, t_newYs = self.estimateFeatureTranslationBatch( - t_startXs_flat, t_startYs_flat, t_Ix, t_Iy, (t_img_prev_gray * 255)[0], (t_img_next_gray * 255)[0] + t_startXs_flat, + t_startYs_flat, + t_Ix, + t_Iy, + (t_img_prev_gray * 255)[0], + (t_img_next_gray * 255)[0], ) t_newXs = torch.reshape(t_newXs, t_startXs.shape) @@ -149,7 +164,9 @@ def estimateFeatureTranslationBatch(self, t_startX, t_startY, t_Ix, t_Iy, t_img1 t_coor = torch.stack((t_mesh_x_flat, t_mesh_y_flat), dim=1) t_I2_value = interp2_torch_batch( - t_img2_gray.clone(), t_coor[:, 0, :].contiguous(), t_coor[:, 1, :].contiguous() + t_img2_gray.clone(), + t_coor[:, 0, :].contiguous(), + t_coor[:, 1, :].contiguous(), ).contiguous() t_Ip = (t_I2_value - t_I1_value)[:, :, None] @@ -162,10 +179,6 @@ def estimateFeatureTranslationBatch(self, t_startX, t_startY, t_Ix, t_Iy, t_img1 if __name__ == "__main__": - import cv2 as cv - from PIL import Image, ImageDraw - import numpy as np - # TODO write a test for it pre = torch.load("/home/jonfrey/git/wild_visual_navigation/previous.pt") cur = torch.load("/home/jonfrey/git/wild_visual_navigation/current.pt") diff --git a/wild_visual_navigation/learning/utils/metric_logger.py b/.deprecated/utils/metric_logger.py similarity index 89% rename from wild_visual_navigation/learning/utils/metric_logger.py rename to .deprecated/utils/metric_logger.py index c82439d8..5892fa86 100644 --- a/wild_visual_navigation/learning/utils/metric_logger.py +++ b/.deprecated/utils/metric_logger.py @@ -83,19 +83,37 @@ def log_image(self, graph, res, mode, threshold=0.5): self.auroc_self_image[mode](preds=bp, target=bpro.type(torch.long)) self.log_handel( - f"{mode}_auroc_gt_image", self.auroc_gt_image[mode], on_epoch=True, on_step=False, batch_size=BS + f"{mode}_auroc_gt_image", + self.auroc_gt_image[mode], + on_epoch=True, + on_step=False, + batch_size=BS, ) self.log_handel( - f"{mode}_auroc_self_image", self.auroc_self_image[mode], on_epoch=True, on_step=False, batch_size=BS + f"{mode}_auroc_self_image", + self.auroc_self_image[mode], + on_epoch=True, + on_step=False, + batch_size=BS, ) # GT self.acc_gt_image[mode](preds=bp > threshold, target=graph.label.type(torch.long)) self.acc_self_image[mode](preds=bp > threshold, target=bpro.type(torch.long)) - self.log_handel(f"{mode}_acc_gt_image", self.acc_gt_image[mode], on_epoch=True, on_step=False, batch_size=BS) self.log_handel( - f"{mode}_acc_self_image", self.acc_self_image[mode], on_epoch=True, on_step=False, batch_size=BS + f"{mode}_acc_gt_image", + self.acc_gt_image[mode], + on_epoch=True, + 
on_step=False, + batch_size=BS, + ) + self.log_handel( + f"{mode}_acc_self_image", + self.acc_self_image[mode], + on_epoch=True, + on_step=False, + batch_size=BS, ) @torch.no_grad() @@ -110,7 +128,7 @@ def log_confidence(self, graph, res, confidence, mode): bpro = graph.label.clone().type(torch.float32).flatten() buffer_conf = graph.label.clone().type(torch.float32).flatten() - bp = res[seg_pixel_index, 0].reshape(BS, H, W) + bp = res[seg_pixel_index, 0].reshape(BS, H, W) # noqa: F841 bpro = graph.y[seg_pixel_index].reshape(BS, H, W) buffer_conf = confidence[seg_pixel_index].reshape(BS, H, W) @@ -134,7 +152,11 @@ def log_confidence(self, graph, res, confidence, mode): self.acc_anomaly_gt_image[mode](preds=buffer_conf, target=graph.label.type(torch.long)) self.acc_anomaly_self_image[mode](preds=buffer_conf, target=bpro.type(torch.long)) self.log_handel( - f"{mode}_acc_anomaly_gt_image", self.acc_anomaly_gt_image[mode], on_epoch=True, on_step=False, batch_size=BS + f"{mode}_acc_anomaly_gt_image", + self.acc_anomaly_gt_image[mode], + on_epoch=True, + on_step=False, + batch_size=BS, ) self.log_handel( f"{mode}_acc_anomaly_self_image", @@ -154,7 +176,7 @@ def get_epoch_results(self, mode): roc_gt_image = [None, None, None] auroc_gt_image = self.auroc_gt_image[mode].compute().item() - # generate proprioceptive label + # generate supervision label if self.log_roc_image: roc_self_image = [a.cpu().numpy() for a in self.roc_self_image[mode].compute()] else: diff --git a/.docker/Dockerfile b/.docker/Dockerfile deleted file mode 100644 index ddeebd91..00000000 --- a/.docker/Dockerfile +++ /dev/null @@ -1,21 +0,0 @@ -FROM pytorchlightning/pytorch_lightning:base-conda-py3.8-torch1.10 - -# nvidia-container-runtime -ENV NVIDIA_VISIBLE_DEVICES \ - ${NVIDIA_VISIBLE_DEVICES:-all} -ENV NVIDIA_DRIVER_CAPABILITIES \ - ${NVIDIA_DRIVER_CAPABILITIES:+$NVIDIA_DRIVER_CAPABILITIES,}graphics - -LABEL maintainer="Jonas Frey" mail="jonfrey@ethz.ch" - -SHELL ["/bin/bash", "--login", "-c"] - -ARG DEBIAN_FRONTEND=noninteractive -ENV DEBIAN_frontend noninteractive - -RUN mkdir /home/git -RUN mkdir /home/results -RUN mkdir /home/code -RUN mkdir /home/dataset -RUN mkdir /home/work -RUN mkdir /home/tmpdir \ No newline at end of file diff --git a/.docker/bsub.sh b/.docker/bsub.sh deleted file mode 100755 index 9dbb166e..00000000 --- a/.docker/bsub.sh +++ /dev/null @@ -1,16 +0,0 @@ -#!/bin/bash - -# Is executed either from your local workstation or on the cluster. -# Syncs the repostiory to the cluster (only on local workstation) and schedules the job on the cluster. -echo "Execute cluster_bsub.sh" -echo "Args: $@" -h=`echo $@` - -if [[ ! 
$ENV_WORKSTATION_NAME = "euler" ]] -then - rsync -r -v --exclude=.git/* --exclude=results/* --exclude=.neptune/* /home/$USERNAME/git/wild_visual_navigation/ $USERNAME@euler:/cluster/home/$USERNAME/wild_visual_navigation - ssh $USERNAME@euler " source /cluster/home/$USERNAME/.bashrc; bsub -n 24 -R singularity -R \"rusage[mem=2596,ngpus_excl_p=1]\" -W $TIME -o $OUTFILE_NAME -R \"select[gpu_mtotal0>=10000]\" -R \"rusage[scratch=1500]\" -R \"select[gpu_driver>=470]\" /cluster/home/$USERNAME/wild_visual_navigation/.docker/cluster_run.sh $h" -else - TIME=24:00 - bsub -n 24 -R singularity -R "rusage[mem=1596,ngpus_excl_p=1]" -W $TIME -R "select[gpu_mtotal0>=10000]" -R "rusage[scratch=1500]" -R "select[gpu_driver>=470]" /cluster/home/$USERNAME/wild_visual_navigation/.docker/cluster_run.sh $h -fi \ No newline at end of file diff --git a/.docker/build.sh b/.docker/build.sh deleted file mode 100755 index 40f0b3e0..00000000 --- a/.docker/build.sh +++ /dev/null @@ -1,3 +0,0 @@ -ssh_key_name = "id_rsa" - -eval $(ssh-agent) && ssh-add ~/.ssh/id_rsa && export DOCKER_BUILDKIT=1 && docker build --progress=plain --ssh default -f Dockerfile ./ -t wvn \ No newline at end of file diff --git a/.docker/cluster_run.sh b/.docker/cluster_run.sh deleted file mode 100755 index 8d31dd7a..00000000 --- a/.docker/cluster_run.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash - -# Is executed on a node on the cluster, started with the bsub command. - -echo "Execute cluster_run.sh" -echo $@ - -module load gcc/6.3.0 cuda/11.4.2 -tar -xf /cluster/work/rsl/$USERNAME/wvn/containers/wvn.tar -C $TMPDIR - -singularity exec -B $TMPDIR:/home/tmpdir -B $WORK/wvn:/home/work -B $HOME/wvn:/home/git --nv --writable --containall $TMPDIR/wvn.sif /home/wild_visual_navigation/.docker/cluster_run.sh $@ -echo "Execute cluster_run.sh done" -bkill $LSB_JOBID -exit 0 \ No newline at end of file diff --git a/.docker/container_run.sh b/.docker/container_run.sh deleted file mode 100755 index 0dec3285..00000000 --- a/.docker/container_run.sh +++ /dev/null @@ -1,12 +0,0 @@ -#!/bin/bash - -# Is executed inside the singularity container. -# Start the training. 
- -echo "Execute cluster_run_container.sh" -h=`echo $@` -echo $h -export ENV_WORKSTATION_NAME=euler - -cd /home/git/wvn -exec bash -c "/root/miniconda3/envs/lightning/bin/python3 -u scripts/train.py --headless $h" \ No newline at end of file diff --git a/.flake8 b/.flake8 index 9888251d..b41810cd 100644 --- a/.flake8 +++ b/.flake8 @@ -1,5 +1,5 @@ [flake8] -ignore=E402,E501,W503 +ignore=E402,E501,W503,C901,E731,W605 extend-ignore = E203 max-line-length = 120 max-complexity = 18 diff --git a/.gitignore b/.gitignore index 133a267b..88488fd1 100644 --- a/.gitignore +++ b/.gitignore @@ -7,6 +7,8 @@ assets/stego/cocostuff27_vit_base_5.ckpt .pytest_cache/** .vscode/** notebooks/** +*.code-workspace + # DrawIO **.dtmp **.bkp @@ -143,3 +145,5 @@ dmypy.json # Pyre type checker .pyre/ +assets/virtual_env/* + diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..ff6efa3b --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,24 @@ +# .pre-commit-config.yaml +# From https://sbarnea.com/lint/black/ +--- +repos: + - repo: https://github.com/python/black.git + rev: 22.12.0 + hooks: + - id: black + language_version: python3 + - repo: https://github.com/pycqa/flake8 + rev: 3.7.9 + hooks: + - id: flake8 + additional_dependencies: + - flake8-black>=0.1.1 + language_version: python3 + - repo: local + hooks: + - id: jupyter-nb-clear-output + name: jupyter-nb-clear-output + files: \.ipynb$ + stages: [commit] + language: system + entry: jupyter nbconvert --ClearOutputPreprocessor.enabled=True --inplace \ No newline at end of file diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..288c58ea --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2022-2024 ETH Zurich, Matias Mattamala, Jonas Frey. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. \ No newline at end of file diff --git a/README.md b/README.md index 015904b6..d6c40739 100644 --- a/README.md +++ b/README.md @@ -2,10 +2,10 @@

Citation • - Setup • + Installation • + Overview • Experiments • - Contributing • - Credits + Development ![Formatting](https://github.com/leggedrobotics/wild_visual_navigation/actions/workflows/formatting.yml/badge.svg)

@@ -15,218 +15,182 @@
Dino
## Citation
+```
+@INPROCEEDINGS{frey23fast,
+ AUTHOR = {Frey, Jonas and Mattamala, Matias and Chebrolu, Nived and Cadena, Cesar and Fallon, Maurice and Hutter, Marco},
+ TITLE = {\href{https://arxiv.org/abs/2305.08510}{Fast Traversability Estimation for Wild Visual Navigation}},
+ BOOKTITLE = {Proceedings of Robotics: Science and Systems},
+ YEAR = {2023},
+ ADDRESS = {Daegu, Republic of Korea},
+ MONTH = {July},
+ DOI = {10.15607/RSS.2023.XIX.054}
+}
+```
+If you are also building on the STEGO integration or using the pre-trained models for a comparison, please cite:
```
-@INPROCEEDINGS{FreyMattamala23,
- AUTHOR = {Jonas Frey and Matias Mattamala and Nived Chebrolu and Cesar Cadena and Maurice Fallon and Marco Hutter},
- TITLE = {{Fast Traversability Estimation for Wild Visual Navigation}},
- BOOKTITLE = {Proceedings of Robotics: Science and Systems},
- YEAR = {2023},
- ADDRESS = {Daegu, Republic of Korea},
- MONTH = {June},
- DOI = {TBD}
+@INPROCEEDINGS{mattamala24wild,
+ AUTHOR = {Mattamala, Matias and Frey, Jonas and Libera, Piotr and Chebrolu, Nived and Martius, Georg and Cadena, Cesar and Hutter, Marco and Fallon, Maurice},
+ TITLE = {{Wild Visual Navigation: Fast Traversability Learning via Pre-Trained Models and Online Self-Supervision}},
+ BOOKTITLE = {under review for Autonomous Robots},
+ YEAR = {2024}
}
```
+
+If you are using the elevation_mapping integration, please cite:
+```
+@INPROCEEDINGS{erni23mem,
+ AUTHOR={Erni, Gian and Frey, Jonas and Miki, Takahiro and Mattamala, Matias and Hutter, Marco},
+ TITLE={\href{https://arxiv.org/abs/2309.16818}{MEM: Multi-Modal Elevation Mapping for Robotics and Learning}},
+ BOOKTITLE={2023 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
+ YEAR={2023},
+ PAGES={11011-11018},
+ DOI={10.1109/IROS55552.2023.10342108}
+}
+```
+
Check out also our other works.
Dino
-## Setup
+## Installation
-1. Clone the repository.
+1. Clone the WVN and our STEGO reimplementation.
```shell
mkdir ~/git && cd ~/git
git clone git@github.com:leggedrobotics/wild_visual_navigation.git
+git clone git@github.com:leggedrobotics/self_supervised_segmentation.git
```
-2. Install the conda environment.
+2. Install the virtual environment.
```shell
-# Make sure to be in the base conda environment
cd ~/git/wild_visual_navigation
-conda env create -f environment.yaml
+# TODO
```
3. Install the wild_visual_navigation package.
```shell
-conda activate wvn
cd ~/git
pip3 install -e ./wild_visual_navigation
```
-4. Configure global paths.
+4. [Optionally] Configure custom paths
+Set your custom global paths by defining the `ENV_WORKSTATION_NAME` environment variable and exporting it in your `~/.bashrc`.
+
+ ```shell
+ export ENV_WORKSTATION_NAME=your_workstation_name
+ ```
+The paths can be specified by modifying `wild_visual_navigation/wild_visual_navigation/cfg/gloabl_params.py`.
+Add your desired global paths.
+Per default, all results are stored in `wild_visual_navigation/results`.
-      All global paths are stored within a single yaml-file (`cfg\env\ge76.yaml`) for each machine.
-      The correct yaml-file for a machine is identified based on the environment variable `ENV_WORKSTATION_NAME`.
-      You can generate a new configuration environment file in the same directory:
-      Example: `cfg/env/your_workstation_name.yaml`
-      Content:
-```yaml
-# If a relative path is given it is relative the the wild_visual_navigation project directory.
-base: results/learning
-perugia_root: /media/Data/Datasets/2022_Perugia
-```
+Dino
+## Overview
+![Overview](./assets/drawings/software_overview.jpg)
+What we provide:
+- Learning and Feature Extraction Nodes integrated in ROS1
+- Gazebo Test Simulation Environment
+- Example ROSbags
+- Pre-trained models with minimalistic inference script (can be used as an easy baseline)
+- Integration into elevation_mapping_cupy
-      We recommend to directly set the environment variable in your `~/.bashrc` by adding the following:
-
- ```shell
- export ENV_WORKSTATION_NAME=your_workstation_name
- ```
Dino
## Experiments
-### Robot Usage [Online]
-Mode to run the pipeline either fully online on the robot or to simply replay rosbags on your system.
-- Launch ANYmal Converter:
-```shell
-rosrun wild_visual_navigation_anymal anymal_msg_converter_node.py
-```
+### Inference with a pre-trained model
-- Run WVN Node:
-```shell
-python wild_visual_navigation_ros/scripts/wild_visual_navigation_node.py _mode:=debug
-```
-There are multiple parameters you can change via the ros-parameter server.
-Optionally the node offers services to store the created graph/trained network to the disk.
-- (optionally) RVIZ:
-```shell
-roslaunch wild_visual_navigation_ros view.launch
-```
+### Online adaptation [Simulation]
+
-- (replay only) Run Debayer:
+### Online adaptation [Rosbag]
+
+MPI Outdoor
+MPI Indoor
+Bahnhofstrasse
+Mountain Bike
+
+
+#### Setup
+Let's set up a new catkin_ws:
```shell
-roslaunch image_proc_cuda_ros image_proc_cuda_node.launch cam0:=false cam1:=false cam2:=false cam3:=false cam4:=true cam5:=false cam6:=false run_gamma_correction:=false run_white_balance:=true run_vignetting_correction:=false run_color_enhancer:=false run_color_calibration:=false run_undistortion:=true run_clahe:=false dump_images:=false needs_rotation_cam4:=true debayer_option:=bayer_gbrg8
+# Create Workspace
+source /opt/ros/noetic/setup.bash
+mkdir -p ~/catkin_ws/src && cd ~/catkin_ws/src
+catkin init
+catkin config --extend /opt/ros/noetic
+catkin config --cmake-args -DCMAKE_BUILD_TYPE=RelWithDebInfo
+
+# Clone Repos
+git clone git@github.com:ANYbotics/anymal_d_simple_description.git
+git clone git@github.com:ori-drs/procman_ros.git
+
+# Symlink WVN
+ln -s ~/git/wild_visual_navigation ~/catkin_ws/src
+
+# Dependencies
+rosdep install -ryi --from-paths . --ignore-src
+
+# Build
+cd ~/catkin_ws
+catkin build anymal_d_simple_description
+catkin build procman_ros
+catkin build wild_visual_navigation_ros
+
+# Source
+source /opt/ros/noetic/setup.bash
+source ~/catkin_ws/devel/setup.bash
```
-- (replay only) Replay Rosbag:
+After successfully building the ROS workspace, you can run the full pipeline either by using the launch file (this requires all packages to be installed into your system Python installation) or by running the nodes from the virtual environment as plain Python scripts.
+
+
+- Run wild_visual_navigation:
```shell
-rosbag play --clock path_to_mission/*.bag
+roslaunch wild_visual_navigation_ros wild_visual_navigation.launch
```
### Replay Usage [Online]
-We provide a launch file to start all required nodes for close-loop integration.
+- (ANYmal replay only) Load ANYmal description for RViZ
```shell
-roslaunch wild_visual_navigation_ros replay_launch.launch
-```
-The launch file allows to toggle the individual modules on and off.
-```xml - - - - - - +roslaunch anymal_d_simple_description load.launch ``` -- Run WVN Node: +- (ANYmal replay only) Replay Rosbag: ```shell -python wild_visual_navigation_ros/scripts/wild_visual_navigation_node.py _mode:=default +robag play --clock path_to_mission/*.bag ``` -- Replay Rosbag: +- RVIZ: ```shell -rosbag play --clock path_to_mission/*.bag +roslaunch wild_visual_navigation_ros view.launch ``` -### Learning Usage [Offline] -#### Dataset Generation -Sometimes it`s useful to just analyze the network training therefore we provide the tools to extract a dataset useful for learning from a given rosbag. -In the following we explain how you can generate the dataset with the following structure: +Degugging (sometimes it is desirable to run the nodes seperate): +```shell +python wild_visual_navigation_ros/scripts/wvn_feature_extractor_node.py ``` -dataset_name - split - forest_train.txt - forest_val.txt - forest_test.txt - day3 - date_time_mission_0_day_3 - features - slic100_dino224_16 - center - time_stamp.pt - ... - graph - time_stamp.pt - ... - seg - time_stamp.pt - ... - slic_sift - ... - ... - image - time_stamp.pt - ... - supervision_mask - time_stamp.pt - ... +```shell +python wild_visual_navigation_ros/scripts/wvn_learning_node.py ``` -1. Dataset is configured in **wild_visual_navigation/utils/dataset_info.py** -2. Run extract_images_and_labels.py (can be also done for multiple missions with **extract_all.py**) - - Will at first merge all bags within the provided folders from dataset_info into bags containing the `_tf.bag` and other useful data `_wvn.bag`. - - Steps blocking through the bag - - Currently, this is done by storing a binary mask and images as .pt files (maybe change to png for storage) -3. Validate if images and labels are generated correctly with **validate_extract_images_and_labels.py** - - This script will remove images if no label is available and the other way around - - The stopping early times should be quite small within the seconds -4. Create lists with the training and train/val/test images: **create_train_val_test_lists.py** -5. Convert the correct `.pt` files to `.png` such that you can upload them for the test set to segments.ai **convert_test_images_for_labelling.py** -6. Label them online -7. Fetch the results using **download_bitmaps_from_segments_ai.py** -8. Extract the features segments and graph from the image **extract_features_for_dataset.py** - - -#### Training the Network -##### Training -We provide scripts for training the network for a single run where a parameter configuration yaml-file can be passed to override the parameters configured within `cfg/experiments_params.py`. -Training from the final dataset. -`python3 scripts/train_gnn.py --exp=exp_forest.yaml` - -##### Hyperparameter -We also provide scripts to use optuna for hyperparameter-searching: - -`python3 scripts/train_optuna.py --exp=exp_forest.yaml` - -Within the objective function you can easily adjust the trail parameter suggestions. - -##### Ablations -Finally, we categorize our ablations into `loss`, `network`, `feature`, `time_adaptation` and `knn_evaluation`. - -##### `loss`, `network`, and `feature` -For `loss`, `network`, and `feature` we can simply run a training script and pass the correct keyword. -We provide the configurations for those experiments within the `cfg/exp/ablation` folder. 
-```
-python3 scripts/ablation/training_ablation.py --ablation_type=network
-```
-After running the training the results are stored respectively in `scripts/ablations/_ablation` as a pickle file.
-For each training run the trained network is evaluate on all testing scenes and the AUROC and ROC values are stored with respect to the hand labeled gt-labels and self-supervised proprioceptive-labels.
-We provide a jupyter notebook to interpret the training results.
-```
-python3 scripts/ablation/training_ablation_visu.ipynb
-```
+- The general configuration files can be found under: `wild_visual_navigation/cfg/experiment_params.py`
+- This configuration is used in the `offline-model-training` and in the `online-ros` mode.
+- When running the `online-ros` mode, additional configurations for the individual nodes are defined in `wild_visual_navigation/cfg/ros_params.py`.
+- This configuration file is filled from the ROS parameter server at runtime.
+- The default values for this configuration can be found under `wild_visual_navigation/wild_visual_navigation_ros/config/wild_visual_navigation`.
+- We set an environment variable to automatically load the correct global paths and trigger some special behavior, e.g., when training on a cluster.
-##### `time_adaptation`
-For the `time_adaptation` run simply run:
-```
-python3 scripts/ablation/time_adaptation.py
-```
-and for visualization:
-```
-python3 scripts/ablation/time_adaptation_visu.py
-```
-done.
Dino
-## Contributing
+## Development
### Code formatting
```shell
@@ -241,12 +205,29 @@ Code format is checked on push.
### Testing
Introduction to [pytest](https://github.com/pluralsight/intro-to-pytest).
-
```shell
pytest
```
-Pytest is not checked on push.
-Dino
+### Open-Sourcing
+Generating headers:
```shell
+pip3 install addheader
-## Credits
+# If you are using zsh; otherwise remove the \
+addheader wild_visual_navigation -t header.txt -p \*.py --sep-len 79 --comment='#' --sep=' '
+addheader wild_visual_navigation_ros -t header.txt -p \*.py --sep-len 79 --comment='#' --sep=' '
+addheader wild_visual_navigation_anymal -t header.txt -p \*.py --sep-len 79 --comment='#' --sep=' '
+
+addheader wild_visual_navigation_ros -t header.txt -p \*CMakeLists.txt --sep-len 79 --comment='#' --sep=' '
+addheader wild_visual_navigation_anymal -t header.txt -p \*.py -p \*CMakeLists.txt --sep-len 79 --comment='#' --sep=' '
+```
+
+### Releasing ANYmal data
+```shell
+rosrun procman_ros sheriff -l ~/git/wild_visual_navigation/wild_visual_navigation_anymal/config/procman/record_rosbags.pmd --start-roscore
+```
+
+```shell
+rosbag_play --tf --sem --flp --wvn mission/*.bag
+```
diff --git a/assets/drawings/software_overview.jpg b/assets/drawings/software_overview.jpg
new file mode 100644
index 00000000..38d5b0e0
Binary files /dev/null and b/assets/drawings/software_overview.jpg differ
diff --git a/assets/images/bahnhofstrasse_raw.png b/assets/images/bahnhofstrasse_raw.png
new file mode 100644
index 00000000..9ed1f053
Binary files /dev/null and b/assets/images/bahnhofstrasse_raw.png differ
diff --git a/assets/images/bahnhofstrasse_trav.png b/assets/images/bahnhofstrasse_trav.png
new file mode 100644
index 00000000..15917325
Binary files /dev/null and b/assets/images/bahnhofstrasse_trav.png differ
diff --git a/assets/images/mountain_bike_trail_raw.png b/assets/images/mountain_bike_trail_raw.png
new file mode 100644
index 00000000..099617d2
Binary files /dev/null and b/assets/images/mountain_bike_trail_raw.png differ
diff --git a/assets/images/mountain_bike_trail_trav.png
b/assets/images/mountain_bike_trail_trav.png new file mode 100644 index 00000000..30bece6c Binary files /dev/null and b/assets/images/mountain_bike_trail_trav.png differ diff --git a/assets/images/mpi_indoor_raw.png b/assets/images/mpi_indoor_raw.png new file mode 100644 index 00000000..39d0aeea Binary files /dev/null and b/assets/images/mpi_indoor_raw.png differ diff --git a/assets/images/mpi_indoor_trav.png b/assets/images/mpi_indoor_trav.png new file mode 100644 index 00000000..58ed6fee Binary files /dev/null and b/assets/images/mpi_indoor_trav.png differ diff --git a/assets/images/mpi_outdoor_raw.png b/assets/images/mpi_outdoor_raw.png new file mode 100644 index 00000000..1af3e066 Binary files /dev/null and b/assets/images/mpi_outdoor_raw.png differ diff --git a/assets/images/mpi_outdoor_trav.png b/assets/images/mpi_outdoor_trav.png new file mode 100644 index 00000000..6f1024ef Binary files /dev/null and b/assets/images/mpi_outdoor_trav.png differ diff --git a/cfg/env/jetson.yaml b/cfg/env/jetson.yaml deleted file mode 100644 index 8e38b327..00000000 --- a/cfg/env/jetson.yaml +++ /dev/null @@ -1 +0,0 @@ -base: results \ No newline at end of file diff --git a/docker/Dockerfile b/docker/Dockerfile new file mode 100644 index 00000000..a78b68ec --- /dev/null +++ b/docker/Dockerfile @@ -0,0 +1,126 @@ +##################### +# Base image # +##################### +FROM nvidia/cuda:11.8.0-runtime-ubuntu20.04 as base + +# Labels +LABEL maintainer="Matias Mattamala;Jonas Frey" +LABEL contact="matias@robots.ox.ac.uk;jonfrey@ethz.ch" +LABEL description="WVN Docker" + +ARG ROS_VERSION="noetic" + +# == +# Disable dialog frontend +# == +ARG DEBIAN_FRONTEND=noninteractive + +# == +# Select shell +# == +SHELL ["/bin/bash", "-c"] + +# == +# Install ROS +# == +RUN apt update \ + && apt install --no-install-recommends -y curl gnupg lsb-release git build-essential nano \ + && sh -c 'echo "deb http://packages.ros.org/ros/ubuntu $(lsb_release -sc) main" > /etc/apt/sources.list.d/ros-latest.list' \ + && curl -s https://raw.githubusercontent.com/ros/rosdistro/master/ros.asc | apt-key add - \ + && apt update \ + && apt install --no-install-recommends -y ros-${ROS_VERSION}-desktop-full \ + && apt install --no-install-recommends -y \ + python3-pip \ + python3-venv \ + python3-rosdep \ + python3-rosinstall \ + python3-rosinstall-generator \ + python3-wstool \ + python3-catkin-tools \ + python3-osrf-pycommon \ + && rm -f "/etc/ros/rosdep/sources.list.d/20-default.list" \ + && rosdep init \ + && rosdep update \ + && echo "source /opt/ros/noetic/setup.bash" >> ~/.bashrc + +# == +# Install python packages +# == +RUN cd /root \ + && python3 -m venv env --system-site-packages \ + && source /root/env/bin/activate \ + && pip install --upgrade pip \ + && pip3 install --no-cache-dir \ + torch==2.1.0 torchvision --extra-index-url https://download.pytorch.org/whl/cu121 \ + && pip3 install --no-cache-dir \ + black \ + flake8 \ + jupyter \ + wget \ + numpy \ + tqdm \ + kornia \ + torchmetrics \ + pytorch_lightning \ + pytest \ + scipy \ + scikit-image \ + scikit-learn \ + matplotlib \ + seaborn \ + pandas \ + pytictac \ + torch_geometric \ + omegaconf \ + optuna \ + neptune \ + fast-slic \ + hydra-core \ + prettytable \ + termcolor \ + pydensecrf@git+https://github.com/lucasb-eyer/pydensecrf.git \ + liegroups@git+https://github.com/mmattamala/liegroups \ + opencv-python>=4.6 + +# == +# Remove cache and extra files +# == +RUN rm -rf /var/lib/apt/lists/* && apt clean + + +# == +# Enable dialog again +# == +ARG 
DEBIAN_FRONTEND=dialog + +# == +# Run bash +# == +CMD ["/bin/bash"] +WORKDIR /root + + +############################################################### +# Development image +# This adds display access and simulation support +############################################################### + +FROM base as dev + +ENV DEBIAN_FRONTEND=noninteractive + +RUN mkdir -p catkin_ws/src \ + && source /opt/ros/noetic/setup.bash \ + && source "/root/.bashrc" \ + && cd /root/catkin_ws && catkin build \ + && apt-get update \ + && apt-get install -y \ + ros-noetic-jackal-simulator \ + ros-noetic-jackal-desktop \ + ros-noetic-teleop-twist-keyboard \ + ros-noetic-rqt-robot-steering \ + && rm -rf /var/lib/apt/lists/* + +RUN echo "source /root/catkin_ws/devel/setup.bash" >> ~/.bashrc +RUN echo "source /root/env/bin/activate" >> ~/.bashrc +ENV DEBIAN_FRONTEND=dialog \ No newline at end of file diff --git a/docker/Dockerfile.pyg b/docker/Dockerfile.pyg deleted file mode 100644 index ab23f9c6..00000000 --- a/docker/Dockerfile.pyg +++ /dev/null @@ -1,54 +0,0 @@ -ARG BASE_IMAGE=rslethz/jetpack-5:r34.1.1-ml -FROM ${BASE_IMAGE} - -# Labels -LABEL maintainer="Matias Mattamala" -LABEL contact="matias@robots.ox.ac.uk" -LABEL description="Image with custom pytorch geometric (pyg) builds for Jetson" -LABEL example_usage="docker run --cidfile /tmp/jetson_docker.cid -it --rm --net=host --runtime nvidia -e DISPLAY=$DISPLAY -v /tmp/.X11-unix/:/tmp/.X11-unix -v ${YOUR_GIT_WS}:/root/git -v ${YOUR_CATKIN_WS}:/root/catkin_ws/ rslethz/jetpack-5:r34.1.1-wvn" - -# Set CUDA_ARCH_LIST for torch-geometric compilation -ARG TORCH_CUDA_ARCH_LIST="5.3;6.2;7.2;8.7" - -# Pytorch geometric -RUN echo "Installing pytorch-geometric. It requires to compile each module to enable CUDA 11.4 support" - -# Jetson architectures -ENV TORCH_CUDA_ARG_LIST=$TORCH_CUDA_ARCH_LIST -RUN echo TORCH_CUDA_ARG_LIST=$TORCH_CUDA_ARG_LIST - -ENV PATH="/usr/local/cuda/bin:${PATH}" -RUN echo PATH=$PATH - -ENV CPATH="/usr/local/cuda/include:${CPATH}" -RUN echo CPATH=$CPATH - -ENV LD_LIBRARY_PATH="/usr/local/cuda/lib64:${LD_LIBRARY_PATH}" -RUN echo LD_LIBRARY_PATH=$LD_LIBRARY_PATH - -# Library path -ENV LIBRARY_PATH="/usr/local/cuda/lib64:${LIBRARY_PATH}" -RUN echo LIBRARY_PATH=$LIBRARY_PATH - -# CUDA_HOME -ENV CUDA_HOME="/usr/local/cuda" - -# torch-scatter -RUN echo "Compiling torch-scatter" -RUN pip3 -v install torch-scatter - -# torch-sparse -RUN echo "Compiling torch-sparse" -RUN pip3 -v install torch-sparse - -# torch-cluster -RUN echo "Compiling torch-cluster" -RUN pip3 -v install torch-cluster - -# torch-spline-conv -RUN echo "Compiling torch-spline-conv" -RUN pip3 -v install torch-spline-conv - -# torch-geometric -RUN echo "Compiling torch-geometric" -RUN pip3 -v install --no-cache-dir torch-geometric diff --git a/docker/Dockerfile.wvn b/docker/Dockerfile.wvn deleted file mode 100644 index 990e798f..00000000 --- a/docker/Dockerfile.wvn +++ /dev/null @@ -1,66 +0,0 @@ -ARG BASE_IMAGE=rslethz/jetpack-5:r34.1.1-ml-pyg -FROM ${BASE_IMAGE} -ARG BASE_IMAGE -RUN echo BASE_IMAGE=${BASE_IMAGE} - -# Labels -LABEL maintainer="Matias Mattamala" -LABEL contact="matias@robots.ox.ac.uk" -LABEL description="Image with WVN dependencies" -LABEL example_usage="docker run --cidfile /tmp/jetson_docker.cid -it --rm --net=host --runtime nvidia -e DISPLAY=$DISPLAY -v /tmp/.X11-unix/:/tmp/.X11-unix -v ${YOUR_GIT_WS}:/root/git -v ${YOUR_CATKIN_WS}:/root/catkin_ws/ rslethz/jetpack-5:wvn-latest" - -# Extra libraries for other ROS dependencies -RUN pip3 install --no-cache-dir ruamel.yaml -RUN 
pip3 install --no-cache-dir black -RUN pip3 install --no-cache-dir flake8 -RUN pip3 install --no-cache-dir shapely==1.7.1 - -# Install Python dependencies -RUN pip3 install --no-cache-dir pillow==9.2 -RUN pip3 install --no-cache-dir wget -RUN pip3 install --no-cache-dir colorama -RUN pip3 install --no-cache-dir simple-parsing -RUN pip3 install --no-cache-dir kornia -RUN pip3 install --no-cache-dir pytest -RUN pip3 install --no-cache-dir scipy -RUN pip3 install --no-cache-dir scikit-image -RUN pip3 install --no-cache-dir scikit-learn -RUN pip3 install --no-cache-dir seaborn -RUN pip3 install --no-cache-dir pandas -RUN pip3 install --no-cache-dir fast_slic - -# Stuff below only applies to Jetson -# Environment variables to make scikit-learn work -# If not added, it will raise an error: -# ImportError: /usr/local/lib/python3.8/dist-packages/skimage/_shared/../../scikit_image.libs/libgomp-d22c30c5.so.1.0.0: cannot allocate memory in static TLS block -# It seems that scikit-image has not been built correctly. -# -# Your install of scikit-image appears to be broken. -# Try re-installing the package following the instructions at: -# https://scikit-image.org/docs/stable/install.html -# -ENV LD_PRELOAD=/usr/local/lib/python3.8/dist-packages/skimage/_shared/../../scikit_image.libs/libgomp-d22c30c5.so.1.0.0 - -# Pytorch geometric -RUN pip3 -v install torch-scatter -RUN pip3 -v install torch-sparse -RUN pip3 -v install torch-cluster -RUN pip3 -v install torch-spline-conv -RUN pip3 -v install torch-geometric - -# Other learning packages -RUN pip3 install --no-cache-dir neptune-client[optuna] -RUN pip3 install --no-cache-dir hydra-core -RUN pip3 install --no-cache-dir pytorch_lightning==1.6.5 - -# Other dependencies -RUN pip3 install --no-cache-dir git+https://github.com/lucasb-eyer/pydensecrf.git#egg=pydensecrf -RUN pip3 install --no-cache-dir git+https://github.com/mmattamala/liegroups#egg=liegroups -RUN pip3 install --no-cache-dir git+https://github.com/leggedrobotics/stego.git#egg=stego -RUN pip3 install --no-cache-dir git+https://github.com/JonasFrey96/pytorch_pwc.git#egg=pytorch_pwc==0.0.1 - -# Setup entrypoint -COPY entrypoint.sh /entrypoint.sh -ENTRYPOINT ["/entrypoint.sh"] -CMD ["bash"] -WORKDIR /root/ diff --git a/docker/README.md b/docker/README.md index 83851998..4723f414 100644 --- a/docker/README.md +++ b/docker/README.md @@ -1,32 +1,34 @@ -## Docker build instructions -### General overview -This folder stores basic Dockerfiles and scripts to build the dependencies of the wild_visual_navigation package for (1) desktop machines and (2) Jetson platforms. +# Docker files -The main difference between both are the source images. While the one for desktop/laptops rely on NVidia's images with Pytorch support `nvcr.io/nvidia/pytorch:20.06-py3`, the Jetson ones are more involved and rely on internal images prepared by RSL. Additionally, Jetson requires to build pytorch geometric (`pyg`) from scratch, so we also add Dockerfiles to build an intermediate image for this purpose only. 
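+The `dev` target in `docker/Dockerfile` ships ROS Noetic, a Python virtual environment in `/root/env`, and an empty catkin workspace, while the compose files in this folder mount the WVN repositories into `/root/catkin_ws/src`. Below is a minimal sketch of the first-time setup inside a running container; it follows the commented-out `command` in `docker-compose.yaml`, and the service name `wvn` is taken from that file:
+
+```sh
+# Attach to the running service (see the build/run commands below)
+docker compose exec wvn bash
+
+# Inside the container: ~/.bashrc already activates the /root/env virtualenv
+pip3 install -e /root/catkin_ws/src/wild_visual_navigation
+pip3 install -e /root/catkin_ws/src/self_supervised_segmentation
+cd /root/catkin_ws && catkin build
+source /root/catkin_ws/devel/setup.bash
+```
+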
+## Build and run containers
-### Usage
-While staying in the docker folder, you can build the images for desktop platforms (`rslethz/desktop:r34.1.1-wvn`) by using:
+To build the container:
```sh
-./bin/build.sh --target=desktop
+docker compose -f docker-compose.yaml build
```
-Similarly, for Jetson ones you must use:
+To run the container (terminal-based):
+
+```sh
+docker compose -f docker-compose.yaml up -d
+```
+
+To launch bash in the container:
+
```sh
-./bin/build.sh --target=jetson
+docker compose exec wvn bash
```
-The Jetson version will generate the images `rslethz/jetpack-5:r34.1.1-ml-pyg` and `rslethz/jetpack-5:r34.1.1-wvn`, with pytorch geometric and full wild_visual_navigation dependencies respectively.
-### Pushing images to ORI server
-For internal use at the Oxford Robotics Institute (ORI), we also provide a helper script to upload the images to the docker server. For example, after building the images you can run:
+To stop the container:
```sh
-./bin/push_ori.sh --target=jetson
+docker compose -f docker-compose.yaml stop
```
-This will create new tags for the previously mentioned images:
+To run the GUI-enabled version and check the Gazebo environment:
+
```sh
-rslethz/jetpack-5:r34.1.1-ml-pyg -> ori-ci-gateway.robots.ox.ac.uk:12002/drs/jetson:r34.1.1-ml-pyg-latest
-rslethz/jetpack-5:r34.1.1-wvn -> ori-ci-gateway.robots.ox.ac.uk:12002/drs/jetson:r34.1.1-wvn-latest
+docker compose -f docker-compose-gui.yaml up -d
+docker compose -f docker-compose-gui.yaml exec wvn bash
```
-which comply with the naming used internally at ORI.
\ No newline at end of file
diff --git a/docker/bin/build.sh b/docker/bin/build.sh
deleted file mode 100755
index c3620e0b..00000000
--- a/docker/bin/build.sh
+++ /dev/null
@@ -1,43 +0,0 @@
-#!/bin/bash
-set -e
-
-# Source variables
-source bin/env_variables.sh
-
-# Default target
-TARGET=none
-
-# Read arguments
-for i in "$@"
-do
-case $i in
- -t=*|--target=*)
- TARGET=${i#*=}
- echo "[build.sh]: User-set target type is: '$TARGET'"
- shift
- ;;
-esac
-done
-
-# Handle different target cases
-if [[ "$TARGET" == "jetson" ]]; then
- echo "Building images for target [$TARGET]"
-
- # Build pytorch geometric docker
- echo "Building ${PYG_TAG} from $PYG_DOCKERFILE"
- sudo docker build --build-arg BASE_IMAGE=$ML_JETSON_TAG --network=host -t $PYG_JETSON_TAG -f $PYG_DOCKERFILE .
-
- # Build wvn docker
- echo "Building ${WVN_TAG} from $WVN_DOCKERFILE"
- sudo docker build --build-arg BASE_IMAGE=$PYG_JETSON_TAG --network=host -t $WVN_JETSON_TAG -f $WVN_DOCKERFILE .
-
-elif [[ "$TARGET" == "desktop" ]]; then
- echo "Building images for target [$TARGET]"
-
- # Build wvn docker
- echo "Building ${WVN_DESKTOP_TAG} from $WVN_DOCKERFILE"
- sudo docker build --build-arg BASE_IMAGE=$ML_DESKTOP_TAG --network=host -t $WVN_DESKTOP_TAG -f $WVN_DOCKERFILE .
- -else - echo "Error: unsupported target [$TARGET]" -fi \ No newline at end of file diff --git a/docker/bin/env_variables.sh b/docker/bin/env_variables.sh deleted file mode 100755 index c9cb1122..00000000 --- a/docker/bin/env_variables.sh +++ /dev/null @@ -1,19 +0,0 @@ -#!/bin/bash - -# Docker images -PYG_DOCKERFILE=Dockerfile.pyg -WVN_DOCKERFILE=Dockerfile.wvn - -# Source ML images (with Pytorch support) -ML_JETSON_TAG="rslethz/jetpack-5:r34.1.1-ml" -ML_DESKTOP_TAG="nvcr.io/nvidia/pytorch:20.06-py3" - -# Export tags -PYG_JETSON_TAG="rslethz/jetpack-5:r34.1.1-ml-pyg" -WVN_JETSON_TAG="rslethz/jetpack-5:r34.1.1-wvn" -WVN_DESKTOP_TAG="rslethz/desktop:r34.1.1-wvn" - -# ORI-compliant tags for built images -ORI_PYG_JETSON_TAG="ori-ci-gateway.robots.ox.ac.uk:12002/drs/jetson:r34.1.1-ml-pyg-latest" -ORI_WVN_JETSON_TAG="ori-ci-gateway.robots.ox.ac.uk:12002/drs/jetson:r34.1.1-wvn-latest" -ORI_WVN_DESKTOP_TAG="ori-ci-gateway.robots.ox.ac.uk:12002/drs/desktop:wvn-latest" \ No newline at end of file diff --git a/docker/bin/push_ori.sh b/docker/bin/push_ori.sh deleted file mode 100755 index b7121cc0..00000000 --- a/docker/bin/push_ori.sh +++ /dev/null @@ -1,50 +0,0 @@ -#!/bin/bash -# This is an Oxford Robotics Institute (ORI)-specific executable -# It makes tags that are compliant with the internal naming for the images -# and pushes the images to our local server - -set -e - -# Source variables -source bin/env_variables.sh - -# Default target -TARGET=none - -# Read arguments -for i in "$@" -do -case $i in - -t=*|--target=*) - TARGET=${i#*=} - echo "[build.sh]: User-set target type is: '$TARGET'" - shift - ;; -esac -done - -update_tags_and_push() -{ - local original_tag=$1 - local new_tag=$2 - - # Build pytorch geometric docker - echo "Making new tag $new_tag" - docker tag $original_tag $new_tag - echo "Pushing $new_tag to server..." 
- docker push $new_tag - #echo "Removing tag $original_tag" - #docker image rm $original_tag -} - -# Handle different target cases -if [[ "$TARGET" == "jetson" ]]; then - update_tags_and_push "$PYG_JETSON_TAG" "$ORI_PYG_JETSON_TAG" - update_tags_and_push "$WVN_JETSON_TAG" "$ORI_WVN_JETSON_TAG" - -elif [[ "$TARGET" == "desktop" ]]; then - update_tags_and_push $WVN_DESKTOP_TAG $ORI_WVN_DESKTOP_TAG - -else - echo "Error: unsupported target [$TARGET]" -fi diff --git a/docker/bin/run.sh b/docker/bin/run.sh deleted file mode 100755 index c9d167da..00000000 --- a/docker/bin/run.sh +++ /dev/null @@ -1,38 +0,0 @@ -#!/bin/bash -# This script launches a WVN container for the target platform - -set -e - -# Source variables -source bin/env_variables.sh - -# Default target -TARGET=none -GIT_WS=$HOME/git -CATKIN_WS=$HOME/catkin_ws - -# Read arguments -for i in "$@" -do -case $i in - -t=*|--target=*) - TARGET=${i#*=} - echo "[build.sh]: User-set target type is: '$TARGET'" - shift - ;; -esac -done - - -# Handle different target cases -if [[ "$TARGET" == "jetson" ]]; then - docker run -it --rm --net=host --runtime nvidia -e DISPLAY=$DISPLAY -v /tmp/.X11-unix/:/tmp/.X11-unix -v ${GIT_WS}:/root/git -v ${CATKIN_WS}:/root/catkin_ws/ $WVN_JETSON_TAG - -elif [[ "$TARGET" == "desktop" ]]; then - docker run -it --rm --net=host --gpus all --runtime nvidia -e DISPLAY=$DISPLAY -v /tmp/.X11-unix/:/tmp/.X11-unix -v ${GIT_WS}:/root/git -v ${CATKIN_WS}:/root/catkin_ws/ $WVN_DESKTOP_TAG - -else - echo "Error: unsupported target [$TARGET]" -fi - - diff --git a/docker/docker-compose-gui-nvidia.yaml b/docker/docker-compose-gui-nvidia.yaml new file mode 100644 index 00000000..23414ae0 --- /dev/null +++ b/docker/docker-compose-gui-nvidia.yaml @@ -0,0 +1,10 @@ +version: "3.9" +services: + wvn: + extends: + file: docker-compose-gui.yaml + service: wvn + environment: + - NVIDIA_VISIBLE_DEVICES=all + - NVIDIA_DRIVER_CAPABILITIES=all + runtime: nvidia \ No newline at end of file diff --git a/docker/docker-compose-gui.yaml b/docker/docker-compose-gui.yaml new file mode 100644 index 00000000..0cc35c5f --- /dev/null +++ b/docker/docker-compose-gui.yaml @@ -0,0 +1,13 @@ +version: "3.9" +services: + wvn: + extends: + file: docker-compose.yaml + service: wvn + environment: + - DISPLAY=${DISPLAY} + - QT_X11_NO_MITSHM=1 + volumes: + - /dev/dri:/dev/dri + - /tmp/.X11-unix:/tmp/.X11-unix:rw + - /tmp/.docker.xauth:/tmp/.docker.xauth:rw \ No newline at end of file diff --git a/docker/docker-compose-nvidia.yaml b/docker/docker-compose-nvidia.yaml new file mode 100644 index 00000000..4b82f8df --- /dev/null +++ b/docker/docker-compose-nvidia.yaml @@ -0,0 +1,9 @@ +version: "3.9" +services: + wvn: + extends: + file: docker-compose.yaml + service: wvn + environment: + - NVIDIA_VISIBLE_DEVICES=all + runtime: nvidia \ No newline at end of file diff --git a/docker/docker-compose.yaml b/docker/docker-compose.yaml new file mode 100644 index 00000000..4a1058e0 --- /dev/null +++ b/docker/docker-compose.yaml @@ -0,0 +1,23 @@ +version: "3.9" +services: + wvn: + build: + context: .. 
+ dockerfile: docker/Dockerfile + target: dev + stdin_open: true + tty: true + network_mode: "host" + volumes: + - ../../wild_visual_navigation:/root/catkin_ws/src/wild_visual_navigation + - ../../self_supervised_segmentation:/root/catkin_ws/src/self_supervised_segmentation + - ../../segment-anything:/root/catkin_ws/src/segment-anything + environment: + - ROS_IP=127.0.0.1 + command: "/bin/bash" + # command: > + # /bin/bash -c "pip3 install -e /root/catkin_ws/src/wild_visual_navigation + # && pip3 install -e /root/catkin_ws/src/self_supervised_segmentation + # && cd /root/catkin_ws && catkin build + # && cd /root + # && source /root/catkin_ws/devel/setup.bash" \ No newline at end of file diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh deleted file mode 100755 index 4a608ff3..00000000 --- a/docker/entrypoint.sh +++ /dev/null @@ -1,14 +0,0 @@ -#!/bin/bash -set -e - -# Installing wild visual navigation -install_wvn="pip3 install -e /root/git/wild_visual_navigation" - - -echo "Installing wvn: ${install_wvn}..." -$install_wvn > /dev/null -echo "Done!" - -echo "source /root/catkin_ws/devel/setup.bash" >> /root/.bashrc - -exec "$@" diff --git a/environment.yaml b/environment.yaml deleted file mode 100644 index 7ba355e1..00000000 --- a/environment.yaml +++ /dev/null @@ -1,50 +0,0 @@ -name: wvn -channels: - - pytorch - - robostack - - robostack-experimental - - conda-forge - - pyg - - defaults -dependencies: - - python=3.8 - - kornia=0.6.6 - - pip - - torchvision=0.12 - - pytorch=1.11.0 - - torchaudio - - cudatoolkit=11.3 - - pytest - - scipy - - scikit-image - - scikit-learn - - ros-noetic-ros-base - - ros-noetic-cv-bridge - - libopencv - - compilers - - cmake - - pkg-config - - make - - ninja - - catkin_tools - - pyg - - neptune-client - - simple-parsing - - matplotlib - - seaborn - - pandas - - torchmetrics==0.11.0 - - pip: - - neptune-client[optuna] - - wandb - - wget - - hydra-core - - pytorch_lightning==1.6.5 - - colorama - - kornia_rs - - fast_slic - - git+https://github.com/lucasb-eyer/pydensecrf.git#egg=pydensecrf - - git+https://github.com/kornia/kornia#egg=kornia - - git+https://github.com/mmattamala/liegroups#egg=liegroups - - --editable git+https://github.com/leggedrobotics/stego.git#egg=stego==0.0.1 - - --editable git+https://github.com/JonasFrey96/pytorch_pwc.git#egg=pytorch_pwc==0.0.1 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..e34796ec --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,2 @@ +[tool.black] +line-length = 120 \ No newline at end of file diff --git a/scripts/confidence_generator/plot_confidence.ipynb b/scripts/confidence_generator/plot_confidence.ipynb deleted file mode 100644 index 2ba20dbe..00000000 --- a/scripts/confidence_generator/plot_confidence.ipynb +++ /dev/null @@ -1,166 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import wild_visual_navigation.visu.paper_colors as pc\n", - "import torch\n", - "import os\n", - "import numpy as np\n", - "import matplotlib.pyplot as plt\n", - "from pathlib import Path\n", - "\n", - "%matplotlib inline\n", - "\n", - "WORKING_DIR = \"/home/matias/git_rsl/wild_visual_navigation/results/learning/2023-01-21T15-39-12_test\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "samples_directory = Path(os.path.join(WORKING_DIR, \"confidence_generator\"))\n", - "\n", - "# Check confidence generator folder\n", - "if not samples_directory.exists():\n", 
- " raise FileNotFoundError(f\"Confidence generator path selected does not exists: {confidence_generator_path}\")\n", - "\n", - "# Load all the .pt files\n", - "steps = []\n", - "all_samples = []\n", - "positive_samples = []\n", - "gaussian_mean = []\n", - "gaussian_std = []\n", - "\n", - "for s in sorted(samples_directory.rglob(\"*.pt\")):\n", - " data = torch.load(s)\n", - "\n", - " # Get step\n", - " step = int(s.stem.replace(\"samples_\", \"\"))\n", - "\n", - " # Save step\n", - " steps.append(step)\n", - " all_samples.append(data[\"x\"].numpy())\n", - " positive_samples.append(data[\"x_positive\"].numpy())\n", - " gaussian_mean.append(data[\"mean\"].item())\n", - " gaussian_std.append(data[\"std\"].item())\n", - "\n", - "steps = np.asarray(steps)\n", - "gaussian_mean = np.asarray(gaussian_mean)\n", - "gaussian_std = np.asarray(gaussian_std)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "from datetime import datetime\n", - "\n", - "N_steps = len(steps)\n", - "N_skip = 5\n", - "fig, axs = plt.subplots(N_steps, constrained_layout=True, figsize=(5, N_steps * 2))\n", - "\n", - "for s in range(N_steps):\n", - " N = 30\n", - " bins = np.linspace(0, 15, N)\n", - " all_hist = np.histogram(all_samples[s], bins)\n", - " pos_hist = np.histogram(positive_samples[s], bins)\n", - "\n", - " axs[s].bar(all_hist[1][:-1], all_hist[0] / all_hist[0].max(), alpha=0.5, color=\"k\", width=0.4)\n", - " axs[s].bar(pos_hist[1][:-1], pos_hist[0] / pos_hist[0].max(), alpha=0.5, color=\"b\", width=0.4)\n", - " axs[s].plot(bins, np.exp(-((bins - gaussian_mean[s]) ** 2) / (2 * gaussian_std[s] ** 2)), color=\"b\", linewidth=2)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [ - "# Plot distribution over time\n", - "# samples as scatterplot\n", - "# Positive Gaussian as GP (mean + std)\n", - "\n", - "# Plot stuff\n", - "fig_scale = 0.5\n", - "fig, axs = plt.subplots(1, constrained_layout=True, figsize=(fig_scale * 13, fig_scale * 9))\n", - "\n", - "for s in range(len(steps)):\n", - " step = steps[s]\n", - "\n", - " N = len(all_samples[s])\n", - " axs.scatter(\n", - " step * np.ones(N), all_samples[s], color=(0.2, 0.2, 0.2), alpha=0.05, linewidths=0\n", - " ) # axs.set_xlabel(\"Steps\")\n", - "\n", - " M = len(positive_samples[s])\n", - " axs.scatter(\n", - " step * np.ones(M), positive_samples[s], color=pc.paper_colors_rgb_f[\"blue\"], alpha=0.3, linewidths=0\n", - " ) # label=\"positive_samples\"\n", - "\n", - "std_factor = 1.5\n", - "axs.fill_between(\n", - " steps,\n", - " gaussian_mean * 0,\n", - " gaussian_mean + std_factor * gaussian_std,\n", - " color=pc.darken(pc.paper_colors_rgb_f[\"blue\"], 0.3),\n", - " alpha=0.2,\n", - " linewidth=0,\n", - ")\n", - "axs.plot(\n", - " steps,\n", - " gaussian_mean,\n", - " label=\"Gaussian approximation\",\n", - " color=pc.darken(pc.paper_colors_rgb_f[\"blue\"], 0.3),\n", - " linewidth=2,\n", - ")\n", - "axs.set_xlabel(\"Steps\")\n", - "axs.set_ylabel(\"Reconstruction loss\")\n", - "axs.legend()" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "metadata": {}, - "outputs": [], - "source": [] - } - ], - "metadata": { - "kernelspec": { - "display_name": "wvn", - "language": "python", - "name": "python3" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", 
- "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.13" - }, - "orig_nbformat": 4, - "vscode": { - "interpreter": { - "hash": "afa7ba36e378b18fb9d9622f0c65b59a17b022d718aa019edc90e2573a229775" - } - } - }, - "nbformat": 4, - "nbformat_minor": 2 -} diff --git a/scripts/helpers/visu_data.py b/scripts/helpers/visu_data.py deleted file mode 100644 index 66792a43..00000000 --- a/scripts/helpers/visu_data.py +++ /dev/null @@ -1,64 +0,0 @@ -# Use the same config to load the data using the dataloader -from wild_visual_navigation.learning.dataset import get_ablation_module -from wild_visual_navigation.learning.utils import load_env -from wild_visual_navigation import WVN_ROOT_DIR -from wild_visual_navigation.visu import LearningVisualizer -import torch -from torchmetrics import Accuracy, AUROC -from PIL import Image -import numpy as np -import copy -import os -import pickle -from pathlib import Path -import time - -env = load_env() -test_all_datasets = True - -for scene in ["forest", "hilly", "grassland"]: - ablation_data_module = { - "batch_size": 1, - "num_workers": 0, - "env": scene, - "feature_key": "slic_dino", - "test_equals_val": False, - "val_equals_test": False, - "test_all_datasets": test_all_datasets, - "training_data_percentage": 100, - "training_in_memory": False, - } - train_loader, val_loader, test_loader = get_ablation_module( - **ablation_data_module, perugia_root=env["perugia_root"] - ) - test_scenes = [a.dataset.env for a in test_loader] - - visualizer = LearningVisualizer() - import matplotlib.pyplot as plt - - # You probably won't need this if you're embedding things in a tkinter plot... - plt.ion() - - fig = plt.figure() - ax = fig.add_subplot(111) - obj = ax.imshow(np.zeros((100, 100, 3))) - for t in test_loader: - for j, d in enumerate(t): - graph = d[0] - b = 0 - center = graph.center[:] - seg = graph.seg - img = graph.img - # res = visualizer.plot_traversability_graph_on_seg( - # graph.y_gt.type(torch.float32), seg[0], graph, center, img[0], not_log=True - # ) - - res = visualizer.plot_detectron(img[0], graph.label[0].type(torch.long), not_log=True, max_seg=2) - i1 = Image.fromarray(res) - - obj.set_data(i1) - - fig.canvas.draw() - fig.canvas.flush_events() - print(j) - time.sleep(0.25) diff --git a/scripts/paper/convert_to_pandas.ipynb b/scripts/paper/convert_to_pandas.ipynb deleted file mode 100644 index 66074b62..00000000 --- a/scripts/paper/convert_to_pandas.ipynb +++ /dev/null @@ -1,198 +0,0 @@ -{ - "cells": [ - { - "cell_type": "markdown", - "id": "dfe70020", - "metadata": {}, - "source": [ - "# Load run data and convert to pandas dataframe" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "d0b50d34", - "metadata": {}, - "outputs": [], - "source": [ - "# %matplotlib notebook\n", - "\n", - "from wild_visual_navigation import WVN_ROOT_DIR\n", - "import os\n", - "import seaborn as sns\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import pandas as pd\n", - "import pickle\n", - "\n", - "folder, model_name = \"confidence_fn\", False\n", - "folder, model_name = \"lr\", False\n", - "folder, model_name = \"feature\", False\n", - "folder, model_name = \"loss\", False\n", - "folder, model_name = \"network\", False\n", - "\n", - "\n", - "folder, model_name = \"threshold\", False\n", - "\n", - "folder, model_name = \"anomaly_detection_only\", False\n", - "folder, model_name = \"classicial_learning\", True\n", - "# folder, model_name = \"debug\", False\n", - "# p = os.path.join(WVN_ROOT_DIR, 
f\"scripts/ablations/{folder}_ablation/{folder}_ablation_test_results.pkl\")\n", - "ws = \"_\" + os.environ[\"ENV_WORKSTATION_NAME\"]\n", - "p = os.path.join(WVN_ROOT_DIR, f\"results/ablations/{folder}_ablation{ws}/{folder}_ablation_test_results.pkl\")\n", - "with open(p, \"rb\") as f:\n", - " res = pickle.load(f)\n", - "\n", - "import scipy\n", - "import pandas as pd\n", - "import pickle\n", - "import copy\n", - "\n", - "data = {\n", - " \"run_nr\": [],\n", - " \"cfg_name\": [],\n", - " \"train_scene\": [],\n", - " \"test_scene\": [],\n", - "}\n", - "\n", - "for train_scene_name, scene_data in res.items():\n", - " for config_path, config_data in scene_data.items():\n", - " if model_name:\n", - " name = config_path\n", - " else:\n", - " # its a path\n", - " name = config_path.split(\"/\")[-1][:-5]\n", - " for run_nr, run_data in config_data.items():\n", - " for test_scene_name, test_scene_data in run_data.items():\n", - " data[\"cfg_name\"].append(name)\n", - " data[\"run_nr\"].append(run_nr)\n", - " data[\"train_scene\"].append(train_scene_name)\n", - " data[\"test_scene\"].append(test_scene_name)\n", - " for k in test_scene_data.keys():\n", - " if type(test_scene_data[k]) is float:\n", - " try:\n", - " data[k].append(test_scene_data[k])\n", - " except:\n", - " data[k] = [test_scene_data[k]]\n", - "\n", - "df = pd.DataFrame.from_dict(data)\n", - "\n", - "data_acc = {\n", - " \"cfg_name\": [],\n", - " \"train_scene\": [],\n", - " \"test_scene\": [],\n", - "}\n", - "\n", - "data_auroc = {\n", - " \"cfg_name\": [],\n", - " \"train_scene\": [],\n", - " \"test_scene\": [],\n", - "}\n", - "keys = []\n", - "for i in range(len(df)):\n", - " n = data[\"cfg_name\"][i]\n", - " tr = data[\"train_scene\"][i]\n", - " te = data[\"test_scene\"][i]\n", - "\n", - " a = df[(df[\"train_scene\"] == tr) * (df[\"test_scene\"] == te) * (df[\"cfg_name\"] == n)]\n", - " k = n + tr + te\n", - " if k in keys:\n", - " continue\n", - " keys.append(k)\n", - "\n", - " data_auroc[\"cfg_name\"].append(n)\n", - " data_auroc[\"train_scene\"].append(tr)\n", - " data_auroc[\"test_scene\"].append(te)\n", - "\n", - " data_acc[\"cfg_name\"].append(n)\n", - " data_acc[\"train_scene\"].append(tr)\n", - " data_acc[\"test_scene\"].append(te)\n", - "\n", - " for k in a.keys():\n", - "\n", - " if k.find(\"auroc\") != -1:\n", - " try:\n", - " data_auroc[k + \"_std\"].append(a[k].std())\n", - " data_auroc[k + \"_m\"].append(a[k].mean())\n", - " except:\n", - " data_auroc[k + \"_std\"] = [a[k].std()]\n", - " data_auroc[k + \"_m\"] = [a[k].mean()]\n", - " if k.find(\"acc\") != -1:\n", - " try:\n", - " data_acc[k + \"_std\"].append(a[k].std())\n", - " data_acc[k + \"_m\"].append(a[k].mean())\n", - " except:\n", - " data_acc[k + \"_std\"] = [a[k].std()]\n", - " data_acc[k + \"_m\"] = [a[k].mean()]\n", - "df_auroc = pd.DataFrame.from_dict(data_auroc)\n", - "df_auroc = df_auroc.round(decimals=4)\n", - "df_auroc[df_auroc.select_dtypes(include=[\"number\"]).columns] = (\n", - " df_auroc[df_auroc.select_dtypes(include=[\"number\"]).columns] * 100\n", - ")\n", - "\n", - "df_acc = pd.DataFrame.from_dict(data_acc)\n", - "df_acc = df_acc.round(decimals=4)\n", - "df_acc[df_acc.select_dtypes(include=[\"number\"]).columns] = (\n", - " df_acc[df_acc.select_dtypes(include=[\"number\"]).columns] * 100\n", - ")" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "08841994", - "metadata": {}, - "outputs": [], - "source": [ - "df_acc = df_acc.sort_values(\"test_acc_gt_image_m\", ascending=False)\n", - "scene = \"hilly\"\n", - 
"df_acc[(df_acc[\"train_scene\"].str.find(scene) != -1)] # * (df_acc[\"test_scene\"].str.find(scene) != -1)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "57a22059", - "metadata": {}, - "outputs": [], - "source": [ - "df_acc = df_acc.sort_values(\"test_acc_gt_image_m\", ascending=False)\n", - "scene = \"forest\"\n", - "df_acc[(df_acc[\"train_scene\"].str.find(scene) != -1)] # (df[\"test_scene\"].str.find(scene) != -1)" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "a5cc7cee", - "metadata": {}, - "outputs": [], - "source": [ - "df_acc = df_acc.sort_values(\"test_acc_gt_image_m\", ascending=False)\n", - "scene = \"grassland\"\n", - "df_acc[(df_acc[\"train_scene\"].str.find(scene) != -1)] # (df[\"test_scene\"].str.find(scene) != -1)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "wvn", - "language": "python", - "name": "wvn" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/scripts/paper/data_percentage.ipynb b/scripts/paper/data_percentage.ipynb deleted file mode 100644 index e36e6605..00000000 --- a/scripts/paper/data_percentage.ipynb +++ /dev/null @@ -1,169 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "bd542ea1", - "metadata": {}, - "outputs": [], - "source": [ - "import pickle\n", - "from wild_visual_navigation import WVN_ROOT_DIR\n", - "import os\n", - "import seaborn as sns\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "\n", - "ws = \"_\" + os.environ[\"ENV_WORKSTATION_NAME\"]\n", - "name = \"data_percentage_all\"\n", - "with open(os.path.join(WVN_ROOT_DIR, f\"results/ablations/{name}{ws}/{name}_steps.pkl\"), \"rb\") as f:\n", - " res = pickle.load(f)\n", - "\n", - "\n", - "# GET RUN CONFIGURATION\n", - "all_percentages = []\n", - "all_steps = []\n", - "all_runs = []\n", - "for data in res:\n", - " data[\"model_path\"]\n", - " percentage, steps, run = data[\"percentage\"], data[\"steps\"], data[\"run\"]\n", - " all_runs.append(run)\n", - " all_percentages.append(percentage)\n", - " all_steps.append(steps)\n", - "NR_RUNS = np.unique(np.array(all_runs)).shape[0]\n", - "NR_TRAINIG_DATA = np.unique(np.array(all_percentages)).shape[0]\n", - "STEPS_DIV = np.unique(all_steps)[1] - np.unique(all_steps)[0]\n", - "NR_STEPS = np.unique(all_steps).shape[0]\n", - "\n", - "# CHANGE STORAGE FORMAT\n", - "\n", - "auroc_gt = {}\n", - "auroc_gt[\"forest\"] = np.zeros((NR_TRAINIG_DATA, NR_STEPS, NR_RUNS))\n", - "auroc_gt[\"grassland\"] = np.zeros((NR_TRAINIG_DATA, NR_STEPS, NR_RUNS))\n", - "auroc_gt[\"hilly\"] = np.zeros((NR_TRAINIG_DATA, NR_STEPS, NR_RUNS))\n", - "auroc_prop = {}\n", - "auroc_prop[\"forest\"] = np.zeros((NR_TRAINIG_DATA, NR_STEPS, NR_RUNS))\n", - "auroc_prop[\"grassland\"] = np.zeros((NR_TRAINIG_DATA, NR_STEPS, NR_RUNS))\n", - "auroc_prop[\"hilly\"] = np.zeros((NR_TRAINIG_DATA, NR_STEPS, NR_RUNS))\n", - "\n", - "\n", - "for data in res:\n", - " percentage, steps, run = data[\"percentage\"], data[\"steps\"], data[\"run\"]\n", - " try:\n", - " da = [v for v in data[\"results\"].values()][0]\n", - " auroc_gt[data[\"scene\"]][int(percentage / 10) - 1, int(steps / STEPS_DIV), run] = da[\"test_acc_gt_image\"]\n", - " auroc_prop[data[\"scene\"]][int(percentage / 10) - 1, int(steps / STEPS_DIV), 
run] = da[\"test_acc_self_image\"]\n", - " except:\n", - " pass" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "df98e062", - "metadata": { - "scrolled": false - }, - "outputs": [], - "source": [ - "import matplotlib.pyplot as plt\n", - "\n", - "fontdict = {\n", - " \"fontsize\": 16,\n", - " \"fontweight\": plt.rcParams[\"axes.titleweight\"],\n", - " \"verticalalignment\": \"baseline\",\n", - " \"horizontalalignment\": \"center\",\n", - "}\n", - "\n", - "\n", - "def plot(title, data, env, ax0, ax1, plot_y, scene):\n", - " ax1.tick_params(axis=\"both\", which=\"major\", labelsize=12)\n", - "\n", - " # Plot Matrix\n", - " label_y = [str(k) for k in range(10, 101, 10)]\n", - " label_x = [str(200)] + [str(int(j / 1000)) + \"k\" for j in range(300, data.shape[1] * 102, 100)]\n", - "\n", - " im = ax1.imshow(data, cmap=sns.color_palette(\"RdYlBu\", as_cmap=True))\n", - " ax1.set_xticks([1, 21, 21 + 25, 21 + 50, 21 + 75])\n", - " ax1.set_yticks([0, 5, 9])\n", - " ax1.set_xticklabels([50, 250, 500, 750, 1000])\n", - " ax1.set_yticklabels([10, 50, 100])\n", - "\n", - " ax1.invert_yaxis()\n", - " ax1.set_xlabel(f\"{scene} - Training Steps\")\n", - "\n", - " if plot_y:\n", - " ax1.set_ylabel(\"Data Percentage\")\n", - "\n", - " ax0.xaxis.set_label_coords(0.5, -3)\n", - "\n", - " plt.setp(ax1.get_xticklabels(), rotation=0, ha=\"center\", rotation_mode=\"anchor\")\n", - " ax1.set_aspect(3.5)\n", - "\n", - " # Plot color bar\n", - " nr = 100\n", - " bar = np.arange(0, nr)[None]\n", - " im = ax0.imshow(bar, cmap=sns.color_palette(\"RdYlBu\", as_cmap=True))\n", - " ax0.set_aspect(2)\n", - " ax0.set_xticks(np.array([0, nr - 1]))\n", - " ax0.set_yticks(np.array([]))\n", - " ax0.set_xticklabels(np.array([round(data.min() * 100, 1), round(data.max() * 100, 1)]))\n", - " ax0.set_yticklabels([])\n", - "\n", - " ax1.tick_params(axis=\"both\", which=\"major\", labelsize=12)\n", - " ax1.tick_params(axis=\"both\", which=\"minor\", labelsize=12)\n", - " ax0.tick_params(axis=\"both\", which=\"major\", labelsize=12)\n", - " ax0.tick_params(axis=\"both\", which=\"minor\", labelsize=12)\n", - "\n", - " ax0.set_xlabel(\"ACC in percentage\", fontdict=fontdict)\n", - " ax0.xaxis.set_label_coords(0.5, -3)\n", - "\n", - "\n", - "scale = 2\n", - "width_half_inch, width_double_inch = 88.9 / 25.4, 182.0 / 25.4\n", - "height_inch = 33 / 25.4\n", - "\n", - "fig = plt.figure(figsize=(width_double_inch * scale, height_inch * scale), dpi=300)\n", - "\n", - "fig.set_tight_layout(False)\n", - "\n", - "gs1 = fig.add_gridspec(nrows=2, ncols=3, left=0.05, right=0.98, top=1, wspace=0.15, height_ratios=[1, 15])\n", - "ax0, ax1, ax2 = fig.add_subplot(gs1[0, 0]), fig.add_subplot(gs1[0, 1]), fig.add_subplot(gs1[0, 2])\n", - "gs2 = fig.add_gridspec(nrows=2, ncols=3, left=0.05, right=0.98, top=1.2, wspace=0.15, height_ratios=[1, 5])\n", - "\n", - "ax3 = fig.add_subplot(gs2[1, 0])\n", - "ax4, ax5 = fig.add_subplot(gs2[1, 1], sharex=ax3), fig.add_subplot(gs2[1, 2], sharex=ax3)\n", - "\n", - "plot(\"Hilly\", auroc_gt[\"hilly\"][:, 3:].mean(axis=2), \"hilly\", ax0, ax3, True, \"Hilly\")\n", - "plot(\"Forest\", auroc_gt[\"forest\"][:, 3:].mean(axis=2), \"forest\", ax1, ax4, False, \"Forest\")\n", - "\n", - "da = auroc_gt[\"grassland\"][:, 3:].mean(axis=2)\n", - "plot(\"Grass\", da, \"grassland\", ax2, ax5, False, \"Grass\")\n", - "\n", - "fig.savefig(f\"/tmp/img.png\")\n", - "fig.savefig(f\"/tmp/img.pdf\")" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "wvn", - "language": "python", - "name": "wvn" - }, - 
"language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/scripts/paper/get_times_of_perugia_dataset.py b/scripts/paper/get_times_of_perugia_dataset.py deleted file mode 100644 index e60f896c..00000000 --- a/scripts/paper/get_times_of_perugia_dataset.py +++ /dev/null @@ -1,27 +0,0 @@ -from pathlib import Path -from wild_visual_navigation.learning.utils import load_env -import os - -perugia_root = os.path.join(load_env()["perugia_root"], "wvn_output/split") -ls = [str(s) for s in Path(perugia_root).rglob("*.txt") if str(s).find("desc") == -1] -import numpy as np - -for l in ls: - pa = [] - with open(l, "r") as f: - while True: - - line = f.readline() - if not line: - break - - pa.append(line.strip()) - folder = np.unique(np.array([s.split("/")[:-2] for s in pa])) - - mission = np.unique(np.array([s.split("/")[-3] for s in pa])) - arr = np.array([float(s.split("/")[-1][:-3].replace("_", ".")) for s in pa]) - delta_t = 0 - for m in mission: - mask = np.array([s.split("/")[-3] for s in pa]) == m - delta_t += arr[mask][-1] - arr[mask][0] - print(delta_t) diff --git a/scripts/paper/learning_curve.ipynb b/scripts/paper/learning_curve.ipynb deleted file mode 100644 index d739381d..00000000 --- a/scripts/paper/learning_curve.ipynb +++ /dev/null @@ -1,363 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "38cca5da", - "metadata": {}, - "outputs": [], - "source": [ - "import matplotlib\n", - "import pickle\n", - "from wild_visual_navigation import WVN_ROOT_DIR\n", - "import os\n", - "import seaborn as sns\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "\n", - "# prev learning_curve\n", - "base = \"results/ablations/time_adaptation-1000_ge76\"\n", - "\n", - "with open(os.path.join(WVN_ROOT_DIR, base, \"time_adaptation-1000_steps.pkl\"), \"rb\") as f:\n", - " runs = pickle.load(f)\n", - "scene = \"forest\"" - ] - }, - { - "cell_type": "code", - "execution_count": null, - "id": "5548cbce", - "metadata": {}, - "outputs": [], - "source": [ - "store_every_n_steps = 1\n", - "nr_runs = int(np.array([run[\"run\"] for run in runs]).max()) + 1\n", - "max_step = np.array([run[\"steps\"] for run in runs]).max()\n", - "max_steps = int(max_step / store_every_n_steps) + 1\n", - "\n", - "y_names = [\n", - " \"test_auroc_gt_image\",\n", - " \"test_auroc_self_image\",\n", - " \"test_auroc_anomaly_gt_image\",\n", - " \"test_auroc_anomaly_self_image\",\n", - " \"test_acc_gt_image\",\n", - " \"test_acc_self_image\",\n", - " \"test_acc_anomaly_gt_image\",\n", - " \"test_acc_anomaly_self_image\",\n", - " \"trainer_logged_metrictest_loss_reco\",\n", - " \"trainer_logged_metrictest_loss_trav\",\n", - " \"trainer_logged_metrictest_loss\",\n", - "]\n", - "\n", - "y = {k: np.zeros((max_steps, nr_runs)) for k in y_names}\n", - "x_steps = np.arange(0, max_step + store_every_n_steps, store_every_n_steps)\n", - "\n", - "model_paths = [\"nan\"] * max_steps\n", - "for run in runs:\n", - " step = int(run[\"steps\"] / store_every_n_steps)\n", - " for y_name in y_names:\n", - " try:\n", - " y[y_name][step, run[\"run\"]] = run[\"results\"][scene][y_name]\n", - " except:\n", - " pass\n", - " model_paths[step] = run[\"model_path\"]\n", - "\n", - "y[\"trainer_logged_metrictest_loss_trav\"] *= 16.666666" - ] - }, - { - "cell_type": 
"code", - "execution_count": null, - "id": "5a3ee9c8", - "metadata": { - "scrolled": false - }, - "outputs": [], - "source": [ - "###################################### AUROC ####################################################\n", - "import seaborn as sns\n", - "import matplotlib.pyplot as plt\n", - "from wild_visual_navigation.visu import paper_colors_rgb_u8, paper_colors_rgba_u8\n", - "from wild_visual_navigation.visu import paper_colors_rgb_f, paper_colors_rgba_f\n", - "\n", - "width_singel_inch, width_double_inch = 88.9 / 25.4, 182.0 / 25.4\n", - "height_inch = 82 / 25.4\n", - "scale = 2\n", - "sns.set_style(\"darkgrid\")\n", - "plt.rcParams.update({\"font.size\": 16})\n", - "\n", - "fig = plt.figure(figsize=(width_double_inch * scale, height_inch * scale), dpi=300)\n", - "gs = fig.add_gridspec(nrows=3, ncols=8, left=0.035, right=0.98, top=0.97, wspace=0.9, hspace=0.0)\n", - "gs = fig.add_gridspec(nrows=3, ncols=8, left=0.035, right=0.98, top=0.97, wspace=0.9, height_ratios=[1.4, 1, 1])\n", - "ax0 = fig.add_subplot(gs[0, 0:4])\n", - "ax1 = fig.add_subplot(gs[0, 4:])\n", - "\n", - "# fig, ax = plt.subplots(1, 1, figsize=(width_singel_inch*scale, height_inch*scale), dpi=300)\n", - "ax0.tick_params(axis=\"both\", which=\"major\", labelsize=12)\n", - "ax1.tick_params(axis=\"both\", which=\"major\", labelsize=12)\n", - "\n", - "\n", - "fig.set_tight_layout(True)\n", - "\n", - "\n", - "def plot_auroc(keys, x_steps, y, lim_min, lim_max, ax, y_tags, y_axis_labels, two_axis=False):\n", - " x = [x_steps for k in keys]\n", - " y_mean = [y[k].mean(axis=1) for k in keys]\n", - " y_lower = [y[k].mean(axis=1) - y[k].std(axis=1) for k in keys]\n", - " y_upper = [y[k].mean(axis=1) + y[k].std(axis=1) for k in keys]\n", - " # not used\n", - " ax_ori = ax\n", - "\n", - " for j, (_x, _y, _y_lower, _y_upper, _y_tag) in enumerate(zip(x, y_mean, y_lower, y_upper, y_tags)):\n", - " k = [k for k in paper_colors_rgb_f.keys()][j]\n", - " if two_axis and j == 1:\n", - " ax.plot([0], [0], label=_y_tag, color=paper_colors_rgb_f[k])\n", - " ax.legend()\n", - "\n", - " ax.set_ylabel(y_axis_labels[j])\n", - "\n", - " if two_axis and j == 1:\n", - " ax = ax.twinx()\n", - "\n", - " ax.plot(_x, _y, label=_y_tag, color=paper_colors_rgb_f[k])\n", - " if not (_y_lower is None):\n", - " ax.plot(_x, _y_lower, color=paper_colors_rgb_f[k + \"_light\"], alpha=0.1)\n", - " ax.plot(_x, _y_upper, color=paper_colors_rgb_f[k + \"_light\"], alpha=0.1)\n", - " ax.fill_between(_x, _y_lower, _y_upper, color=paper_colors_rgb_f[k + \"_light\"], alpha=0.2)\n", - "\n", - " if not (two_axis and j == 1):\n", - " ax.legend()\n", - "\n", - " ax.tick_params(axis=\"both\", which=\"major\", labelsize=12)\n", - "\n", - " # ax.plot(np.linspace(0, 1, 100), np.linspace(0, 1, 100), linestyle=\"--\", color=\"gray\")\n", - "\n", - " ax_ori.set_xlabel(\"Training Steps\")\n", - " ax_ori.spines[\"top\"].set_visible(False)\n", - " ax_ori.spines[\"right\"].set_visible(False)\n", - " label_x = [0, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000]\n", - " ax_ori.set_xticks([0, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000])\n", - " ax_ori.set_xticklabels(label_x)\n", - "\n", - " if not two_axis:\n", - " ax_ori.set_ylabel(y_axis_labels[0])\n", - "\n", - " # plt.xlim(0, 1)\n", - " ax_ori.set_ylim(lim_min, lim_max)\n", - "\n", - "\n", - "# plot_auroc(\n", - "# [\"test_auroc_gt_image\", \"test_auroc_self_image\"],\n", - "# x_steps,\n", - "# y,\n", - "# 0.6,\n", - "# 1,\n", - "# ax0,\n", - "# [\"GT AUROC\", \"SELF AUROC\"],\n", - "# 
y_axis_labels=[\"AUROC\"],\n", - "# )\n", - "plot_auroc(\n", - " [\"trainer_logged_metrictest_loss_reco\", \"trainer_logged_metrictest_loss_trav\"],\n", - " x_steps,\n", - " y,\n", - " None,\n", - " None,\n", - " ax0,\n", - " [\"Reco\", \"Trav\"],\n", - " y_axis_labels=[\"Loss\"],\n", - ")\n", - "\n", - "plot_auroc(\n", - " [\"test_acc_gt_image\", \"test_acc_self_image\"],\n", - " x_steps,\n", - " y,\n", - " None,\n", - " None,\n", - " ax1,\n", - " [\"GT Acc\", \"SELF Acc\"],\n", - " y_axis_labels=[\"Accuracy\", \"Self Acc\"],\n", - " two_axis=False,\n", - ")\n", - "\n", - "\n", - "###################################### Images ####################################################\n", - "\n", - "\n", - "# Setup dataloader\n", - "from wild_visual_navigation.learning.dataset import get_ablation_module\n", - "from wild_visual_navigation.learning.utils import load_env\n", - "from wild_visual_navigation import WVN_ROOT_DIR\n", - "from wild_visual_navigation.cfg import ExperimentParams\n", - "\n", - "from wild_visual_navigation.visu import LearningVisualizer\n", - "from PIL import Image\n", - "from wild_visual_navigation.learning.model import get_model\n", - "from dataclasses import asdict\n", - "import torch\n", - "\n", - "with open(os.path.join(WVN_ROOT_DIR, base, \"experiment_params.pkl\"), \"rb\") as f:\n", - " exp = pickle.load(f)\n", - "exp.ablation_data_module\n", - "\n", - "env = load_env()\n", - "\n", - "exp.ablation_data_module\n", - "ablation_data_module = {\n", - " \"batch_size\": 1,\n", - " \"num_workers\": 0,\n", - " \"env\": scene,\n", - " \"feature_key\": exp.ablation_data_module.feature_key,\n", - " \"test_equals_val\": False,\n", - " \"val_equals_test\": False,\n", - " \"test_all_datasets\": False,\n", - " \"training_data_percentage\": 100,\n", - " \"training_in_memory\": False,\n", - "}\n", - "train_loader, val_loader, test_loader = get_ablation_module(**ablation_data_module, perugia_root=env[\"perugia_root\"])\n", - "test_scenes = [a.dataset.env for a in test_loader]\n", - "test_all_datasets = True\n", - "\n", - "\n", - "def load_model(model_cfg: ExperimentParams.ModelParams, checkpoint_path: str):\n", - " model = get_model(asdict(model_cfg))\n", - " ckpt = torch.load(checkpoint_path)\n", - " ckpt = {\n", - " k.replace(\"_model.\", \"\"): v\n", - " for k, v in ckpt.items()\n", - " if k.find(\"_traversability\") == -1 and k.find(\"threshold\") == -1\n", - " }\n", - " res = model.load_state_dict(ckpt, strict=True)\n", - " return model\n", - "\n", - "\n", - "fontdict = {\n", - " \"fontsize\": 16,\n", - " \"fontweight\": plt.rcParams[\"axes.titleweight\"],\n", - " \"verticalalignment\": \"baseline\",\n", - " \"horizontalalignment\": \"center\",\n", - "}\n", - "\n", - "if True:\n", - " for l, img_nr in enumerate([20, 32]):\n", - " if l == 0:\n", - " gs2 = fig.add_gridspec(nrows=3, ncols=8, left=0.06, right=0.97, top=1.0, wspace=0.1, hspace=0.1)\n", - " gs2 = fig.add_gridspec(\n", - " nrows=3, ncols=8, left=0.03, right=0.98, top=1.0, wspace=0.1, height_ratios=[1.7, 3, 0.1]\n", - " )\n", - "\n", - " if l == 1:\n", - " gs2 = fig.add_gridspec(nrows=3, ncols=8, left=0.06, right=0.97, top=1.0, wspace=0.1, hspace=0.1)\n", - " gs2 = fig.add_gridspec(\n", - " nrows=3, ncols=8, left=0.03, right=0.98, top=1.0, bottom=0, wspace=0.1, height_ratios=[1.3, 2.15, 2.0]\n", - " )\n", - "\n", - " thresholds = []\n", - " models = [0, 99, 249, 499, 999]\n", - " for j, training_step in enumerate(models):\n", - " model = load_model(exp.model, model_paths[training_step])\n", - " model.to(\"cuda\")\n", - " 
graph = test_loader[0].dataset[img_nr]\n", - " pred = model(graph)\n", - " visualizer = LearningVisualizer()\n", - " img = graph.img\n", - " seg = graph.seg\n", - " res = visualizer.plot_detectron(img[0], graph.label[0].type(torch.long), not_log=True, max_seg=2)\n", - " center = graph.center\n", - "\n", - " threshold = torch.load(model_paths[training_step])[\"threshold\"].item()\n", - " thresholds.append(threshold)\n", - " traversability = pred[:, 0]\n", - " m = traversability < threshold\n", - " # Scale untraversable\n", - " traversability[m] *= 0.5 / threshold\n", - " # Scale traversable\n", - " traversability[~m] -= threshold\n", - " traversability[~m] *= 0.5 / (1 - threshold)\n", - " traversability[~m] += 0.5\n", - " traversability = traversability.clip(0, 1)\n", - "\n", - " buffer_traversability = graph.seg.clone().type(torch.float32).flatten()\n", - " BS, H, W = graph.seg.shape\n", - " seg_pixel_index = (graph.seg).flatten()\n", - " buffer_traversability = traversability[seg_pixel_index].reshape(BS, H, W)\n", - " res_pred = visualizer.plot_detectron_classification(img[0], buffer_traversability, not_log=True)\n", - "\n", - " # res_pred = visualizer.plot_traversability_graph_on_seg(\n", - " # traversability, seg[0], graph, center, img[0], not_log=True, colorize_invalid_centers=True\n", - " # )\n", - "\n", - " ax = fig.add_subplot(gs2[1 + l, j + 2])\n", - " ax.imshow(res_pred)\n", - " ax.axis(\"off\")\n", - " step = int((training_step + 1) * 1)\n", - " if l == 1:\n", - " ax.set_title(f\"Step-{step}\", fontdict=fontdict, loc=\"center\", y=0, pad=-16)\n", - "\n", - " # graph.seg[0]\n", - " res_img = visualizer.plot_detectron(\n", - " img[0],\n", - " graph.label[0].type(torch.long) * 99,\n", - " not_log=True,\n", - " max_seg=100,\n", - " colormap=\"RdYlBu\",\n", - " boundary_seg=None,\n", - " alpha=0,\n", - " )\n", - " res_gt = visualizer.plot_detectron(\n", - " img[0],\n", - " graph.label[0].type(torch.long) * 99,\n", - " not_log=True,\n", - " max_seg=100,\n", - " colormap=\"RdYlBu\",\n", - " boundary_seg=None,\n", - " )\n", - " res_prop = visualizer.plot_traversability_graph_on_seg(\n", - " graph.y, seg[0], graph, center, img[0], not_log=True, colorize_invalid_centers=True\n", - " )\n", - "\n", - " ax0 = fig.add_subplot(gs2[1 + l, 0])\n", - " ax1 = fig.add_subplot(gs2[1 + l, 1])\n", - " ax7 = fig.add_subplot(gs2[1 + l, 7])\n", - "\n", - " res = ax0.text(-110, 370, s=f\"Example {l+1}\", fontsize=16, rotation=90)\n", - "\n", - " if l == 1:\n", - " ax0.set_title(\"Image\", fontdict=fontdict, loc=\"center\", y=0, pad=-16)\n", - " ax1.set_title(\"Supervision\", fontdict=fontdict, loc=\"center\", y=0, pad=-16)\n", - " ax7.set_title(\"Label\", fontdict=fontdict, loc=\"center\", y=0, pad=-16)\n", - "\n", - " ax0.axis(\"off\")\n", - " ax1.axis(\"off\")\n", - " ax7.axis(\"off\")\n", - "\n", - " ax0.imshow(res_img)\n", - " ax1.imshow(res_prop)\n", - " ax7.imshow(res_gt)\n", - "\n", - "fig.set_tight_layout(False)\n", - "fig.savefig(\"/tmp/img.png\", dpi=300)" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "wvn", - "language": "python", - "name": "wvn" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/scripts/paper/model_timing.ipynb b/scripts/paper/model_timing.ipynb deleted file mode 100644 index d0c0845c..00000000 
--- a/scripts/paper/model_timing.ipynb +++ /dev/null @@ -1,77 +0,0 @@ -{ - "cells": [ - { - "cell_type": "code", - "execution_count": null, - "id": "dcd980a3", - "metadata": {}, - "outputs": [], - "source": [ - "# %matplotlib notebook\n", - "\n", - "from wild_visual_navigation import WVN_ROOT_DIR\n", - "import os\n", - "import seaborn as sns\n", - "import matplotlib.pyplot as plt\n", - "import numpy as np\n", - "import pandas as pd\n", - "import pickle\n", - "\n", - "folder = \"time_extractor\"\n", - "ws = os.environ[\"ENV_WORKSTATION_NAME\"]\n", - "p = os.path.join(WVN_ROOT_DIR, f\"results/ablations/{folder}_ablation_{ws}/{folder}_ablation_test_results.pkl\")\n", - "\n", - "with open(p, \"rb\") as f:\n", - " res = pickle.load(f)\n", - "\n", - "import scipy\n", - "import pandas as pd\n", - "import pickle\n", - "import copy\n", - "\n", - "data = {\n", - " \"cfg_name\": [],\n", - " \"runtime_mean\": [],\n", - " \"runtime_std\": [],\n", - " \"runtime_warmup_mean\": [],\n", - " \"runtime_warmup_std\": [],\n", - " \"parameter\": [],\n", - "}\n", - "\n", - "for name, res in res.items():\n", - " data[\"cfg_name\"].append(name)\n", - " a = np.array(res[\"times_eval\"])\n", - " data[\"runtime_mean\"].append(a.mean())\n", - " data[\"runtime_std\"].append(a.std())\n", - " a = np.array(res[\"times_warmup\"])\n", - " data[\"runtime_warmup_mean\"].append(a.mean())\n", - " data[\"runtime_warmup_std\"].append(a.std())\n", - " data[\"parameter\"].append(res[\"parameter\"])\n", - "\n", - "df = pd.DataFrame.from_dict(data)\n", - "df" - ] - } - ], - "metadata": { - "kernelspec": { - "display_name": "wvn", - "language": "python", - "name": "wvn" - }, - "language_info": { - "codemirror_mode": { - "name": "ipython", - "version": 3 - }, - "file_extension": ".py", - "mimetype": "text/x-python", - "name": "python", - "nbconvert_exporter": "python", - "pygments_lexer": "ipython3", - "version": "3.8.13" - } - }, - "nbformat": 4, - "nbformat_minor": 5 -} diff --git a/scripts/paper/ros_gps_to_tf_publisher.py b/scripts/paper/ros_gps_to_tf_publisher.py deleted file mode 100644 index de05a49a..00000000 --- a/scripts/paper/ros_gps_to_tf_publisher.py +++ /dev/null @@ -1,67 +0,0 @@ -import rospy -import tf -from geometry_msgs.msg import PointStamped -import os -import subprocess -import time - - -def callback(point_stamped, nr): - br = tf.TransformBroadcaster() - br.sendTransform( - (-point_stamped.point.x, -point_stamped.point.y, -point_stamped.point.z), - (0, 0, 0, 1), - rospy.Time.now(), - "map", - f"sensor{str(nr)}", - ) - - -root = "/media/Data/Datasets/2022_Perugia/day3/mission_data" -bags = { - "3": "2022-05-12T09-45-07_mission_0_day_3", - "2": "2022-05-12T09-57-13_mission_0_day_3", - "1": "2022-05-12T10-18-16_mission_0_day_3", - "4": "2022-05-12T10-34-03_mission_0_day_3", - "5": "2022-05-12T10-45-20_mission_0_day_3", -} - -bags = { - "1": "2022-05-12T17-36-33_mission_0_day_3", - "2": "2022-05-12T15-36-30_mission_0_day_3", - "3": "2022-05-12T15-52-37_mission_0_day_3", - "4": "2022-05-12T18-21-23_mission_0_day_3", -} - -bags = { - "1": "2022-05-12T11-56-13_mission_0_day_3", - "2": "2022-05-12T11-44-56_mission_0_day_3", - "3": "2022-05-12T12-08-09_mission_0_day_3", -} - -# Launch rosmaster -# roscore_process = subprocess.Popen(["roscore"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, env=env) -# roscore_process.kill() -print(" roslaunch wild_visual_navigation_ros trajectory_server.launch") -# trajectory_process = subprocess.Popen(["source /home/jonfrey/catkin_ws/devel/setup.bash && roslaunch 
wild_visual_navigation_ros trajectory_server.launch"], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) -# trajectory_process.kill() -for i, m in bags.items(): - cmd = f"rosbag play {root}/{m}/*gps*.bag -r 10 /rover/piksi/position_receiver_0/ros/pos_enu:=/rover/piksi/position_receiver_0/ros/pos_enu{i}" - print(cmd) - # Run simulation - # try: - # subprocess.call( - # ["rosbag", "play", f"{root}/{m}/*tf*.bag", "-r", "10", f"/rover/piksi/position_receiver_0/ros/pos_enu:=/rover/piksi/position_receiver_0/ros/pos_enu{i}"], - # stdout=subprocess.DEVNULL, - # stderr=subprocess.DEVNULL, - # )rosbag play /media/Data/Datasets/2022_Perugia/day3/mission_data/2022-05-12T09-45-07_mission_0_day_3/*tf*.bag -r 10 /rover/piksi/position_receiver_0/ros/pos_enu:=/rover/piksi/position_receiver_0/ros/pos_enu1 - # except: - # print("error") - -rospy.init_node("point_to_tf") -sub = rospy.Subscriber("/rover/piksi/position_receiver_0/ros/pos_enu1", PointStamped, callback, 1) -sub = rospy.Subscriber("/rover/piksi/position_receiver_0/ros/pos_enu2", PointStamped, callback, 2) -sub = rospy.Subscriber("/rover/piksi/position_receiver_0/ros/pos_enu3", PointStamped, callback, 3) -sub = rospy.Subscriber("/rover/piksi/position_receiver_0/ros/pos_enu4", PointStamped, callback, 4) -sub = rospy.Subscriber("/rover/piksi/position_receiver_0/ros/pos_enu5", PointStamped, callback, 5) -rospy.spin() diff --git a/scripts/paper/ros_odom_to_tf_publisher.py b/scripts/paper/ros_odom_to_tf_publisher.py deleted file mode 100644 index 39fe1aeb..00000000 --- a/scripts/paper/ros_odom_to_tf_publisher.py +++ /dev/null @@ -1,25 +0,0 @@ -import rospy -import tf -from geometry_msgs.msg import PoseStamped - -i = 0 - - -def pose_callback(data): - br = tf.TransformBroadcaster() - br.sendTransform( - (data.pose.position.x, data.pose.position.y, data.pose.position.z), - (data.pose.orientation.x, data.pose.orientation.y, data.pose.orientation.z, data.pose.orientation.w), - data.header.stamp, - "base", - "abc", - ) - print(data.header.frame_id) - print(data.header.stamp) - print("published") - - -if __name__ == "__main__": - rospy.init_node("pose_to_tf_node_new") - rospy.Subscriber("/mapping_node/o3d_slam_lidar_pose_in_map", PoseStamped, pose_callback) - rospy.spin() diff --git a/scripts/paper/time_extractor.py b/scripts/paper/time_extractor.py deleted file mode 100644 index e2e1b386..00000000 --- a/scripts/paper/time_extractor.py +++ /dev/null @@ -1,97 +0,0 @@ -from wild_visual_navigation.feature_extractor import FeatureExtractor -import os -from pathlib import Path -import torch -import pickle -from wild_visual_navigation import WVN_ROOT_DIR - -if __name__ == "__main__": - - folder = "time_extractor" - warm_up_runs = 10 - hot_runs = 10 - special_key = "" - - # Setup folder - ws = os.environ["ENV_WORKSTATION_NAME"] - res_folder = os.path.join(WVN_ROOT_DIR, f"results/ablations/{folder}_ablation_{ws}") - p = os.path.join(res_folder, f"{folder}_ablation_test_results{special_key}.pkl") - Path(p).parent.mkdir(parents=True, exist_ok=True) - try: - os.remove(p) - except OSError as error: - pass - device = "cuda" if torch.cuda.is_available() else "cpu" - - # Create a dictionary of feature extractors. 
- fes = {} - fes["slic100_dino224_16"] = FeatureExtractor( - device, "slic", "dino", 224, model_type="vit_small", patch_size=16, slic_num_components=100 - ) - fes["slic100_sift"] = FeatureExtractor(device, "slic", "sift", slic_num_components=100) - fes["slic100_efficientnet_b4"] = FeatureExtractor( - device, "slic", "torchvision", (384, 380), model_type="efficientnet_b4", slic_num_components=100 - ) - fes["slic100_resnet50"] = FeatureExtractor( - device, "slic", "torchvision", 448, model_type="resnet50", slic_num_components=100 - ) - fes["slic100_resnet50_dino"] = FeatureExtractor( - device, "slic", "torchvision", 448, model_type="resnet50_dino", slic_num_components=100 - ) - - res = {} - for extractor_name, fe in fes.items(): - print(extractor_name) - input_size = fe._input_size - if not isinstance(input_size, tuple): - input_size = (input_size, input_size) - - times_warmup = [] - for i in range(warm_up_runs): - img = torch.rand((1, 3, input_size[0], input_size[1])) - img.to(device) - start = torch.cuda.Event(enable_timing=True) - end = torch.cuda.Event(enable_timing=True) - start.record() - feat = fe.compute_features(img, None, None) - end.record() - torch.cuda.synchronize() - t = start.elapsed_time(end) - times_warmup.append(t) - - times_eval = [] - for i in range(hot_runs): - img = torch.rand((1, 3, input_size[0], input_size[1])) - img.to(device) - start = torch.cuda.Event(enable_timing=True) - end = torch.cuda.Event(enable_timing=True) - start.record() - feat = fe.compute_features(img, None, None) - end.record() - torch.cuda.synchronize() - t = start.elapsed_time(end) - times_eval.append(t) - try: - model = fe.extractor.model - model.train() - params = sum(p.numel() for p in model.parameters() if p.requires_grad) - - except: - try: - model = fe._extractor - params = sum(p.numel() for p in model.parameters() if p.requires_grad) - except: - params = 0 - - res[extractor_name] = { - "times_eval": times_eval, - "times_warmup": times_warmup, - "parameter": params, - "warm_up_runs": warm_up_runs, - "hot_runs": hot_runs, - "device": torch.cuda.get_device_name(), - "input_size": input_size, - } - - with open(p, "wb") as handle: - pickle.dump(res, handle, protocol=pickle.HIGHEST_PROTOCOL) diff --git a/scripts/train.py b/scripts/train.py deleted file mode 100644 index f59e29aa..00000000 --- a/scripts/train.py +++ /dev/null @@ -1,26 +0,0 @@ -from simple_parsing import ArgumentParser -from wild_visual_navigation.cfg import ExperimentParams -from wild_visual_navigation.learning.general import training_routine -from wild_visual_navigation.learning.utils import load_yaml -from wild_visual_navigation import WVN_ROOT_DIR -from wild_visual_navigation.utils import override_params -import os - - -if __name__ == "__main__": - """ - WARNING: the command line arguments are overriden based on the enviornment file! 
Not the inutive other way around - """ - - parser = ArgumentParser() - parser.add_arguments(ExperimentParams, dest="experiment") - - parser.add_argument("--exp", type=str, default="nan", help="Overwrite params") - args = parser.parse_args() - - p = os.path.join(WVN_ROOT_DIR, "cfg/exp", args.exp) - if args.exp != "nan" and os.path.isfile(p): - exp_override = load_yaml(p) - args.experiment = override_params(args.experiment, exp_override) - - training_routine(args.experiment) diff --git a/scripts/train_optuna.py b/scripts/train_optuna.py deleted file mode 100644 index 1b4815c1..00000000 --- a/scripts/train_optuna.py +++ /dev/null @@ -1,82 +0,0 @@ -import optuna -from simple_parsing import ArgumentParser -import copy -import os -import neptune.new.integrations.optuna as optuna_utils -import torch - -from wild_visual_navigation.cfg import ExperimentParams -from wild_visual_navigation.learning.general import training_routine -from wild_visual_navigation.learning.utils import get_neptune_run -from wild_visual_navigation.utils import override_params -from wild_visual_navigation.learning.utils import load_yaml -from wild_visual_navigation import WVN_ROOT_DIR - - -def objective(trial, experiment: ExperimentParams): - exp = copy.deepcopy(experiment) - - # exp.optimizer.lr = trial.suggest_float("lr", 0.0001, 0.01, log=True) - exp.loss.w_trav = trial.suggest_float("w_trav", 0.0, 1.0) - exp.loss.w_temp = trial.suggest_float("w_temp", 0.0, 1.0) - exp.loss.w_reco = trial.suggest_float("w_reco", 0.0, 1.0) - exp.loss.anomaly_balanced = trial.suggest_categorical("anomaly_balanced", [True]) - - # if not trial.suggest_categorical("use_temporal_consistency", [True, False]): - # exp.loss.w_temp = 0.0 - # exp.trainer.max_epochs = trial.suggest_int("max_epochs", 1, 10) - - res, _ = training_routine(exp) - - torch.cuda.empty_cache() - return list(res.values())[0]["test_auroc_gt_image"] - - -if __name__ == "__main__": - parser = ArgumentParser() - - parser.add_arguments(ExperimentParams, dest="experiment") - parser.add_argument("--exp", type=str, default="nan", help="Overwrite params") - - parser.add_argument("--name", type=str, default="sweep_loss_function", help="Name of sweep") - parser.add_argument("--n_trials", type=int, default=100, help="Number Trials") - - args = parser.parse_args() - - p = os.path.join(WVN_ROOT_DIR, "cfg/exp", args.exp) - if args.exp != "nan" and os.path.isfile(p): - exp_override = load_yaml(p) - args.experiment = override_params(args.experiment, exp_override) - - args.experiment.general.name = os.path.join(args.name, args.experiment.general.name) - - run = get_neptune_run(neptune_project_name=args.experiment.logger.neptune_project_name, tags=["optuna", args.name]) - - neptune_callback = optuna_utils.NeptuneCallback( - run, - plots_update_freq=2, - log_plot_slice=True, - log_plot_contour=True, - ) - - args.experiment.logger.name = "skip" - args.experiment.trainer.enable_checkpointing = False - args.experiment.cb_checkpoint.active = False - args.experiment.visu.train = 0 - args.experiment.visu.val = 0 - args.experiment.visu.test = 0 - args.experiment.trainer.check_val_every_n_epoch = 100000 - args.experiment.general.store_model_every_n_steps = None - args.experiment.ablation_data_module.training_in_memory = True - args.experiment.trainer.max_steps = 1000 - args.experiment.trainer.max_epochs = None - args.experiment.general.log_to_disk = False - args.experiment.trainer.progress_bar_refresh_rate = 0 - args.experiment.trainer.weights_summary = None - args.experiment.trainer.enable_progress_bar 
= False - - binded_objective = lambda trial: objective(trial=trial, experiment=args.experiment) - study = optuna.create_study(direction=optuna.study.StudyDirection.MAXIMIZE) - study.optimize(binded_objective, n_trials=args.n_trials, callbacks=[neptune_callback]) - - trial = study.best_trial diff --git a/setup.py b/setup.py index 7b31d434..6d0d5d64 100644 --- a/setup.py +++ b/setup.py @@ -1,13 +1,49 @@ from setuptools import find_packages from distutils.core import setup +# Minimum dependencies required prior to installation +# TODO: If we add the "opencv-python>=4.6", it wont build on the jetson + +INSTALL_REQUIRES = [ + # generic + "numpy", + "tqdm", + "kornia>=0.6.5", + "pip", + "torchvision", + "torch>=1.21", + "torchmetrics", + "pytorch_lightning>=1.6.5", + "pytest", + "scipy", + "scikit-image", + "scikit-learn", + "matplotlib", + "seaborn", + "pandas", + "pytictac", + "torch_geometric", + "omegaconf", + "optuna", + "neptune", + "fast-slic", + "hydra-core", + "prettytable", + "termcolor", + "pydensecrf@git+https://github.com/lucasb-eyer/pydensecrf.git", + "liegroups@git+https://github.com/mmattamala/liegroups", + "wget", + "rospkg", +] setup( name="wild_visual_navigation", version="0.0.1", author="Jonas Frey, Matias Mattamala", - author_email="jonfrey@ethz.ch, matias@leggedrobotics.com", + author_email="jonfrey@ethz.ch, matias@robots.oex.ac.uk", packages=find_packages(), - python_requires=">=3.6", + python_requires=">=3.7", description="A small example package", - install_requires=["kornia>=0.6.5"], + install_requires=[INSTALL_REQUIRES], + dependencies=["https://download.pytorch.org/whl/torch-2.1.0+cu121-cp38-cp38-linux_x86_64.whl"], + dependency_links=["https://download.pytorch.org/whl/torch-2.1.0+cu121-cp38-cp38-linux_x86_64.whl"], ) diff --git a/tests/test_confidence_generator.py b/tests/test_confidence_generator.py index f69d0d26..e2ab2bb1 100644 --- a/tests/test_confidence_generator.py +++ b/tests/test_confidence_generator.py @@ -2,7 +2,6 @@ import matplotlib.pyplot as plt import torch import torch.nn.functional as F -import numpy as np def generate_traversability_test_signal(N=500, T=10, events=[3, 8], event_length=0.5, device=torch.device("cpu")): @@ -13,7 +12,7 @@ def generate_traversability_test_signal(N=500, T=10, events=[3, 8], event_length event_length = [event_length] * len(events) for e, l in zip(events, event_length): - half_event = l / 2 + # half_event = l / 2 ini_event = e end_event = e + l x += torch.sigmoid(10 * (t - ini_event)) - torch.sigmoid(10 * (t - end_event)) @@ -21,6 +20,13 @@ def generate_traversability_test_signal(N=500, T=10, events=[3, 8], event_length def test_confidence_generator(): + from wild_visual_navigation.utils.testing import make_results_folder + from wild_visual_navigation.visu import get_img_from_fig + from os.path import join + + # Create test directory + outpath = make_results_folder("test_confidence_generator") + device = torch.device("cpu") N = 1000 sigma_factor = 0.5 @@ -67,7 +73,7 @@ def test_confidence_generator(): conf = torch.zeros((3, N), device=device) # Naive confidence generator - cg = ConfidenceGenerator(std_factor=sigma_factor).to(device) + cg = ConfidenceGenerator(std_factor=sigma_factor, method="latest_measurement").to(device) # Run for i in range(x.shape[0]): @@ -104,7 +110,13 @@ def test_confidence_generator(): # Signal subplot axs[0].plot(t_np, x_noisy_np, label="Reconstructed signal", color="r") axs[0].plot(t_np, x_np, label="Target signal", color="k") - axs[0].plot(t_np, x_noisy_positive_np, label="Positive samples", 
color=(0.5, 0, 0), marker=".") + axs[0].plot( + t_np, + x_noisy_positive_np, + label="Positive samples", + color=(0.5, 0, 0), + marker=".", + ) axs[0].set_ylabel("Signals") axs[0].legend(loc="upper right") @@ -144,7 +156,13 @@ def test_confidence_generator(): axs[2].set_ylabel("Confidence") axs[2].legend(loc="upper right") - plt.show() + img = get_img_from_fig(fig) + img.save( + join( + outpath, + "confidence_generator_test.png", + ) + ) if __name__ == "__main__": diff --git a/tests/test_configuration.py b/tests/test_configuration.py new file mode 100644 index 00000000..13055398 --- /dev/null +++ b/tests/test_configuration.py @@ -0,0 +1,17 @@ +from wild_visual_navigation.cfg import RosLearningNodeParams +from omegaconf import OmegaConf +from omegaconf import read_write +from omegaconf.dictconfig import DictConfig + + +def test_configuration(): + cfg = OmegaConf.structured(RosLearningNodeParams) + print(cfg) + with read_write(cfg): + cfg.image_callback_rate = 1.0 + + print(cfg.image_callback_rate) + + +if __name__ == "__main__": + test_configuration() diff --git a/tests/test_dino_time.py b/tests/test_dino_time.py index d96f4ce8..ce6b34a0 100644 --- a/tests/test_dino_time.py +++ b/tests/test_dino_time.py @@ -1,36 +1,38 @@ -from wild_visual_navigation import WVN_ROOT_DIR -from wild_visual_navigation.utils import Timer -from wild_visual_navigation.feature_extractor import DinoInterface, DinoTrtInterface, TrtModel -from collections import namedtuple, OrderedDict -from torchvision import transforms as T -import cv2 -import os +# from wild_visual_navigation import WVN_ROOT_DIR +from pytictac import Timer +from wild_visual_navigation.feature_extractor import ( + DinoInterface, + # TrtModel, +) + +# from collections import namedtuple, OrderedDict +# from torchvision import transforms as T +# import os import torch -import tensorrt as trt -import numpy as np -import os +from wild_visual_navigation.utils.testing import load_test_image, get_dino_transform + +# import tensorrt as trt +# import numpy as np def test_dino_interfacer(): device = "cuda" if torch.cuda.is_available() else "cpu" di = DinoInterface(device) + transform = get_dino_transform() - np_img = cv2.imread(os.path.join(WVN_ROOT_DIR, "assets/images/forest_clean.png")) - img = torch.from_numpy(cv2.cvtColor(np_img, cv2.COLOR_BGR2RGB)).to(device) - img = img.permute(2, 0, 1) - img = (img.type(torch.float32) / 255)[None] + img = load_test_image().to(device) ##################################################################################### for i in range(5): im = img + torch.rand(img.shape, device=img.device) / 100 - di.inference(di.transform(im)) + di.inference(transform(im)) ##################################################################################### with Timer("BS1 Dino Inference: "): for i in range(5): im = img + torch.rand(img.shape, device=img.device) / 100 with Timer("BS1 Dino Single: "): - res = di.inference(di.transform(im)) + di.inference(transform(im)) ##################################################################################### # img = img.repeat(4, 1, 1, 1) @@ -41,14 +43,15 @@ def test_dino_interfacer(): # res = di.inference(di.transform(im)) ##################################################################################### - # Conversion from ONNX model (https://github.com/facebookresearch/dino) - exported_trt_file = "dino_exported.trt" - exported_trt_path = os.path.join(WVN_ROOT_DIR, "assets/dino", exported_trt_file) - di_trt = DinoTrtInterface(exported_trt_path, device) + # # Conversion from ONNX model 
(https://github.com/facebookresearch/dino) + # from wild_visual_navigation.feature_extractor import DinoTrtInterface + # exported_trt_file = "dino_exported.trt" + # exported_trt_path = os.path.join(WVN_ROOT_DIR, "assets/dino", exported_trt_file) + # di_trt = DinoTrtInterface(exported_trt_path, device) - with Timer("TensorRT Inference: "): - im = img + torch.rand(img.shape, device=img.device) / 100 - y = di_trt.inference(di.transform(im).contiguous()) + # with Timer("TensorRT Inference: "): + # im = img + torch.rand(img.shape, device=img.device) / 100 + # di_trt.inference(di.transform(im).contiguous()) ##################################################################################### # Conversion using the torch_tensorrt library: https://github.com/pytorch/TensorRT diff --git a/tests/test_dino_time_settings.py b/tests/test_dino_time_settings.py index 02ca5cd5..22cc87f6 100644 --- a/tests/test_dino_time_settings.py +++ b/tests/test_dino_time_settings.py @@ -1,16 +1,20 @@ from wild_visual_navigation import WVN_ROOT_DIR -from wild_visual_navigation.utils import Timer -from wild_visual_navigation.feature_extractor import DinoInterface, DinoTrtInterface, TrtModel -from collections import namedtuple, OrderedDict -from torchvision import transforms as T -import cv2 +from wild_visual_navigation.feature_extractor import ( + DinoInterface, + # DinoTrtInterface, + # TrtModel, +) + +# from collections import namedtuple, OrderedDict +# from torchvision import transforms as T +# import cv2 import os from os.path import join import numpy as np import pandas as pd -import os import torch -import tensorrt as trt + +# import tensorrt as trt from tqdm import tqdm import contextlib @@ -35,10 +39,7 @@ def capture_output(): def run_dino_interfacer(): """Performance inference using stego and stores result as an image.""" - from wild_visual_navigation.utils import Timer - from wild_visual_navigation.visu import get_img_from_fig - import matplotlib.pyplot as plt - from stego.src import remove_axes + from pytictac import Timer import cv2 # Create test directory @@ -85,7 +86,7 @@ def run_dino_interfacer(): for model in MODELS: for patch in PATCHES: di = None - with capture_output() as out: + with capture_output(): # Create DINO di = DinoInterface( device=device, @@ -99,12 +100,12 @@ def run_dino_interfacer(): print(size, interp, model, patch, img_name) for n in tqdm(range(TRIALS)): - with capture_output() as out: + with capture_output(): try: timer.tic() - feat_dino = di.inference(di.transform(img), interpolate=False) + di.inference(di.transform(img), interpolate=False) t = timer.toc() - except Exception as e: + except Exception: t = np.nan samples.append([size, interp, model, patch, img_name, n, t]) @@ -114,7 +115,16 @@ def run_dino_interfacer(): # Make pandas dataframe df = pd.DataFrame( - samples, columns=["image_size", "interpolation", "model", "patch_size", "img_name", "sample", "time"] + samples, + columns=[ + "image_size", + "interpolation", + "model", + "patch_size", + "img_name", + "sample", + "time", + ], ) df.to_pickle(join(test_path, f"dino_time_settings_{TRIALS}iter.pkl")) df.to_csv(join(test_path, f"dino_time_settings_{TRIALS}iter.csv")) diff --git a/tests/test_feature_extractor.py b/tests/test_feature_extractor.py index 06776035..c0785f42 100644 --- a/tests/test_feature_extractor.py +++ b/tests/test_feature_extractor.py @@ -1,51 +1,52 @@ -from wild_visual_navigation import WVN_ROOT_DIR from wild_visual_navigation.feature_extractor import FeatureExtractor from wild_visual_navigation.visu import 
get_img_from_fig +from wild_visual_navigation.utils.testing import load_test_image, get_dino_transform, make_results_folder +from os.path import join +from pytictac import Timer import matplotlib.pyplot as plt import torch -from torchvision import transforms as T -from pathlib import PurePath, Path -import os -import cv2 +import itertools def test_feature_extractor(): - segmentation_type = "none" - feature_type = "dino" - device = "cuda" if torch.cuda.is_available() else "cpu" - fe = FeatureExtractor(device, segmentation_type=segmentation_type, feature_type=feature_type) - - transform = T.Compose( - [ - T.Resize(448, T.InterpolationMode.NEAREST), - T.CenterCrop(448), - ] - ) - - np_img = cv2.imread(os.path.join(WVN_ROOT_DIR, "assets/images/forest_clean.png")) - img = torch.from_numpy(cv2.cvtColor(np_img, cv2.COLOR_BGR2RGB)).to(device) - img = img.permute(2, 0, 1) - img = (img.type(torch.float32) / 255)[None] - adj, feat, seg, center = fe.extract(transform(img.clone())) - - p = PurePath(WVN_ROOT_DIR).joinpath( - "results", "test_feature_extractor", f"forest_clean_graph_{segmentation_type}.png" - ) - Path(p.parent).mkdir(parents=True, exist_ok=True) - - # Plot result as in colab - fig, ax = plt.subplots(1, 2, figsize=(5 * 3, 5)) - - ax[0].imshow(transform(img).permute(0, 2, 3, 1)[0].cpu()) - ax[0].set_title("Image") - ax[1].imshow(seg.cpu(), cmap=plt.colormaps.get("inferno")) - ax[1].set_title("Segmentation") - plt.tight_layout() - - # Store results to test directory - img = get_img_from_fig(fig) - img.save(str(p)) + segmentation_types = ["none", "grid", "slic", "random", "stego"] + feature_types = ["dino", "dinov2", "stego"] + backbone_types = ["vit_small", "vit_base"] # "vit_small_reg", "vit_base_reg"] + + for seg_type, feat_type, back_type in itertools.product(segmentation_types, feature_types, backbone_types): + if seg_type == "stego" and feat_type != "stego": + continue + + with Timer(f"Running seg [{seg_type}], feat [{feat_type}], backbone [{back_type}]"): + try: + fe = FeatureExtractor( + device, segmentation_type=seg_type, feature_type=feat_type, backbone_type=back_type + ) + except Exception: + print("Not available") + continue + + img = load_test_image().to(device) + transform = get_dino_transform() + outpath = make_results_folder("test_feature_extractor") + + # Compute + edges, feat, seg, center, dense_feat = fe.extract(transform(img.clone())) + + # Plot result as in colab + fig, ax = plt.subplots(1, 2, figsize=(5 * 3, 5)) + + ax[0].imshow(transform(img).permute(0, 2, 3, 1)[0].cpu()) + ax[0].set_title("Image") + ax[1].imshow(seg.cpu(), cmap=plt.colormaps.get("gray")) + ax[1].set_title("Segmentation") + plt.tight_layout() + + # Store results to test directory + img = get_img_from_fig(fig) + img.save(join(outpath, f"forest_clean_graph_{seg_type}_{feat_type}.png")) + plt.close() if __name__ == "__main__": diff --git a/tests/test_image_projector.py b/tests/test_image_projector.py index 1472af4b..61eea3c7 100644 --- a/tests/test_image_projector.py +++ b/tests/test_image_projector.py @@ -1,32 +1,25 @@ from wild_visual_navigation.image_projector import run_image_projector -from wild_visual_navigation.traversability_estimator import ProprioceptionNode +from wild_visual_navigation.traversability_estimator import SupervisionNode def test_image_projector(): run_image_projector() -def test_proprioceptive_projection(): - from wild_visual_navigation import WVN_ROOT_DIR +def test_supervision_projection(): from wild_visual_navigation.image_projector import ImageProjector from 
wild_visual_navigation.visu import get_img_from_fig - from wild_visual_navigation.utils import Timer - from wild_visual_navigation.utils import make_plane, make_box, make_dense_plane, make_polygon_from_points - from PIL import Image + from wild_visual_navigation.utils.testing import load_test_image, make_results_folder from liegroups.torch import SE3, SO3 import matplotlib.pyplot as plt import torch - import torchvision.transforms as transforms - import os from os.path import join from kornia.utils import tensor_to_image - from stego.src import remove_axes + from stego.utils import remove_axes import random - to_tensor = transforms.ToTensor() - # Create test directory - os.makedirs(join(WVN_ROOT_DIR, "results", "test_image_projector"), exist_ok=True) + outpath = make_results_folder("test_image_projector") # Define number of cameras (batch) B = 100 @@ -38,19 +31,17 @@ def test_proprioceptive_projection(): # Extrisics pose_camera_in_world = torch.eye(4)[None] - # Image size - H = 1080 - W = 1440 + H = torch.tensor(1080) + W = torch.tensor(1440) # Create projector im = ImageProjector(K, H, W) # Load image - pil_image = Image.open(join(WVN_ROOT_DIR, "assets/images/forest_clean.png")) + torch_image = load_test_image() - # Convert to torch - torch_image = to_tensor(pil_image) + # Resize torch_image = im.resize_image(torch_image) mask = (torch_image * 0.0)[None] @@ -70,15 +61,15 @@ def test_proprioceptive_projection(): delta = SE3(R_WC, rho).as_matrix()[None] # Pose matrix of camera in world frame pose_base_in_world = pose_base_in_world @ delta pose_footprint_in_base = torch.eye(4)[None] - print(delta, pose_base_in_world) + # print(delta, pose_base_in_world) twist = torch.rand((3,)) - proprioception = torch.rand((10,)) + supervision = torch.rand((10,)) traversability = torch.rand(1) traversability_var = torch.tensor([0.2]) color = torch.rand((3,))[None] - proprio_node = ProprioceptionNode( + supervision_node = SupervisionNode( timestamp=i, pose_base_in_world=pose_base_in_world, pose_footprint_in_base=pose_footprint_in_base, @@ -86,15 +77,15 @@ def test_proprioceptive_projection(): width=0.5, length=0.8, height=0.1, - proprioception=proprioception, + supervision=supervision, traversability=traversability, traversability_var=traversability_var, is_untraversable=torch.BoolTensor([False]), ) - nodes.append(proprio_node) + nodes.append(supervision_node) if i > 0: - footprint = proprio_node.make_footprint_with_node(nodes[i - 1])[None] + footprint = supervision_node.make_footprint_with_node(nodes[i - 1])[None] # project footprint k_mask, torch_image_overlay, k_points, k_valid = im.project_and_render( @@ -107,7 +98,7 @@ def test_proprioceptive_projection(): fig, ax = plt.subplots(1, 2, figsize=(2 * 5, 5)) ax[0].imshow(tensor_to_image(torch_image)) ax[0].set_title("Image") - ax[1].imshow(tensor_to_image(mask)) + ax[1].imshow(tensor_to_image(mask[0])) ax[1].set_title("Labels") remove_axes(ax) @@ -115,8 +106,8 @@ def test_proprioceptive_projection(): # Store results to test directory img = get_img_from_fig(fig) - img.save(join(WVN_ROOT_DIR, "results", "test_image_projector", "forest_clean_proprioceptive_projection.png")) + img.save(join(outpath, "forest_clean_supervision_projection.png")) if __name__ == "__main__": - test_proprioceptive_projection() + test_supervision_projection() diff --git a/tests/test_kornia.py b/tests/test_kornia.py index c44cedfe..cd599ac4 100644 --- a/tests/test_kornia.py +++ b/tests/test_kornia.py @@ -1,6 +1,5 @@ -from wild_visual_navigation import WVN_ROOT_DIR from 
wild_visual_navigation.visu import get_img_from_fig -import os +from wild_visual_navigation.utils.testing import make_results_folder from os.path import join import matplotlib.pyplot as plt import torch @@ -47,8 +46,8 @@ def test_draw_polygon(): # Draw k_out = draw_convex_polygon(img, poly, color) # Show - os.makedirs(join(WVN_ROOT_DIR, "results", "test_kornia"), exist_ok=True) + outpath = make_results_folder("test_kornia") fig, ax = plt.subplots(1, 1, figsize=(5, 5)) ax.imshow(tensor_to_image(k_out)) out_img = get_img_from_fig(fig) - out_img.save(join(WVN_ROOT_DIR, "results", "test_kornia", "polygon_test.png")) + out_img.save(join(outpath, "polygon_test.png")) diff --git a/tests/test_monitoring.py b/tests/test_monitoring.py index c5a38b5e..91c36fe2 100644 --- a/tests/test_monitoring.py +++ b/tests/test_monitoring.py @@ -1,4 +1,5 @@ -from wild_visual_navigation.utils import SystemLevelGpuMonitor, SystemLevelTimer, accumulate_memory, accumulate_time +from wild_visual_navigation.utils import SystemLevelGpuMonitor, accumulate_memory +from pytictac import accumulate_time import time import torch @@ -14,26 +15,31 @@ def __init__(self): @accumulate_time def test_memory_then_timing(self, s): time.sleep(s / 1000) - self.tensors.append(torch.zeros((s, s), device="cuda")) + self.tensors.append(torch.zeros((10 * s, 10 * s), device="cuda")) @accumulate_time @accumulate_memory def test_timing_then_memory(self, s): time.sleep(s / 1000) - self.tensors.append(torch.zeros((4, s, s), device="cuda")) + self.tensors.append(torch.zeros((4, 10 * s, 10 * s), device="cuda")) # Create objects my_test = MyTest() gpu_monitor = SystemLevelGpuMonitor( - objects=[my_test], names=["test"], enabled=True, device="cuda", store_samples=True, skip_n_samples=1 - ) - time_monitor = SystemLevelTimer( objects=[my_test], names=["test"], + enabled=True, + device="cuda", + store_samples=True, + skip_n_samples=1, ) + # time_monitor = ClassContextTimer( + # objects=[my_test], + # names=["test"], + # ) # Run loop - for n in range(400): + for n in range(100): print(f"step {n}") step = n t = n / 10 @@ -42,7 +48,7 @@ def test_timing_then_memory(self, s): my_test.test_timing_then_memory(n) gpu_monitor.store("/tmp") - time_monitor.store("/tmp") + # time_monitor.store("/tmp") if __name__ == "__main__": diff --git a/tests/test_optical_flow.py b/tests/test_optical_flow.py index c4bd8b3c..065b5b0b 100644 --- a/tests/test_optical_flow.py +++ b/tests/test_optical_flow.py @@ -1,15 +1,22 @@ -from pytorch_pwc.network import PwcFlowEstimator -from pytorch_pwc import PWC_ROOT_DIR -import torch -from wild_visual_navigation import WVN_ROOT_DIR -from wild_visual_navigation.visu import LearningVisualizer -import numpy as np -import PIL -import os -from wild_visual_navigation.utils import Timer +import pytest +import sys -if __name__ == "__main__": +try: + from pytorch_pwc.network import PwcFlowEstimator + from pytorch_pwc import PWC_ROOT_DIR +except ImportError: + pass + + +@pytest.mark.skipif("pytorch_pwc" not in sys.modules, reason="requires the pytorch_pwc library") +def pytorch_pwc_test(): import os + import torch + from wild_visual_navigation import WVN_ROOT_DIR + from wild_visual_navigation.visu import LearningVisualizer + import numpy as np + import PIL + from pytictac import Timer tenOne = torch.FloatTensor( np.ascontiguousarray( @@ -39,3 +46,7 @@ visu = LearningVisualizer(p_visu=os.path.join(WVN_ROOT_DIR, "results/test_visu"), store=True) visu.plot_optical_flow(res, tenOne, tenTwo) print("done") + + +if __name__ == "__main__": + 
pytorch_pwc_test() diff --git a/tests/test_traversability_estimator.py b/tests/test_traversability_estimator.py index 984e4ef6..fd3d2d51 100644 --- a/tests/test_traversability_estimator.py +++ b/tests/test_traversability_estimator.py @@ -1,4 +1,7 @@ -from wild_visual_navigation.traversability_estimator import run_base_state, run_base_graph +from wild_visual_navigation.traversability_estimator import ( + run_base_state, + run_base_graph, +) def test_base_state(): diff --git a/wild_visual_navigation/__init__.py b/wild_visual_navigation/__init__.py index c3c60af9..1c859e93 100644 --- a/wild_visual_navigation/__init__.py +++ b/wild_visual_navigation/__init__.py @@ -1,4 +1,9 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# import os WVN_ROOT_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) -"""Absolute path to the wild_visual_navigation repository.""" +"""Absolute path to the wild_visual_navigation repository.""" diff --git a/wild_visual_navigation/cfg/__init__.py b/wild_visual_navigation/cfg/__init__.py index 829340ea..590af461 100644 --- a/wild_visual_navigation/cfg/__init__.py +++ b/wild_visual_navigation/cfg/__init__.py @@ -1 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from .global_params import get_global_env_params, GlobalEnvironmentParams from .experiment_params import ExperimentParams +from .ros_params import RosLearningNodeParams, RosFeatureExtractorNodeParams diff --git a/wild_visual_navigation/cfg/experiment_params.py b/wild_visual_navigation/cfg/experiment_params.py index f786e995..d8cba169 100644 --- a/wild_visual_navigation/cfg/experiment_params.py +++ b/wild_visual_navigation/cfg/experiment_params.py @@ -1,10 +1,19 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# from dataclasses import dataclass, field -from typing import Tuple, Dict, List, Optional -from simple_parsing.helpers import Serializable +from typing import List, Optional +from typing import Any +import os +from wild_visual_navigation.cfg import get_global_env_params, GlobalEnvironmentParams @dataclass -class ExperimentParams(Serializable): +class ExperimentParams: + env: GlobalEnvironmentParams = get_global_env_params(os.environ.get("ENV_WORKSTATION_NAME", "default")) + @dataclass class GeneralParams: name: str = "debug/debug" @@ -42,14 +51,22 @@ class LossParams: w_trav: float = 0.03 w_reco: float = 0.5 w_temp: float = 0.0 # 0.75 - method: str = "latest_measurment" - confidence_std_factor: float = 1.0 + method: str = "latest_measurement" + confidence_std_factor: float = 0.5 trav_cross_entropy: bool = False loss: LossParams = LossParams() + @dataclass + class LossAnomalyParams: + method: str = "latest_measurement" + confidence_std_factor: float = 0.5 + + loss_anomaly: LossAnomalyParams = LossAnomalyParams() + @dataclass class TrainerParams: + default_root_dir: Optional[str] = None precision: int = 32 accumulate_grad_batches: int = 1 fast_dev_run: bool = False @@ -57,7 +74,7 @@ class TrainerParams: limit_val_batches: float = 1.0 limit_test_batches: float = 1.0 max_epochs: Optional[int] = None - profiler: bool = False + profiler: Any = False num_sanity_val_steps: int = 0 check_val_every_n_epoch: int = 1 enable_checkpointing: bool = True @@ -65,6 +82,7 @@ class TrainerParams: enable_progress_bar: bool = True weights_summary: Optional[str] = "top" progress_bar_refresh_rate: Optional[int] = None + gpus: int = -1 trainer: TrainerParams = TrainerParams() @@ -84,12 +102,12 @@ class AblationDataModuleParams: @dataclass class ModelParams: - name: str = "SimpleMLP" + name: str = "SimpleMLP" # LinearRnvp, SimpleMLP, SimpleGCN, DoubleMLP load_ckpt: Optional[str] = None @dataclass class SimpleMlpCfgParams: - input_size: int = 90 + input_size: int = 90 # 90 for stego, 384 for dino hidden_sizes: List[int] = field(default_factory=lambda: [256, 32, 1]) reconstruction: bool = True @@ -97,19 +115,30 @@ class SimpleMlpCfgParams: @dataclass class DoubleMlpCfgParams: - input_size: int = 90 + input_size: int = 384 hidden_sizes: List[int] = field(default_factory=lambda: [64, 32, 1]) double_mlp_cfg: DoubleMlpCfgParams = DoubleMlpCfgParams() @dataclass class SimpleGcnCfgParams: - input_size: int = 90 + input_size: int = 384 reconstruction: bool = True hidden_sizes: List[int] = field(default_factory=lambda: [256, 128, 1]) simple_gcn_cfg: SimpleGcnCfgParams = SimpleGcnCfgParams() + @dataclass + class LinearRnvpCfgParams: + input_size: int = 384 + coupling_topology: List[int] = field(default_factory=lambda: [200]) + mask_type: str = "odds" + conditioning_size: int = 0 + use_permutation: bool = True + single_function: bool = False + + linear_rnvp_cfg: LinearRnvpCfgParams = LinearRnvpCfgParams() + model: ModelParams = ModelParams() @dataclass @@ -142,20 +171,10 @@ class VisuParams: @dataclass class LearningVisuParams: - p_visu: Optional[bool] = None + p_visu: Optional[str] = None store: bool = True log: bool = True learning_visu: LearningVisuParams = LearningVisuParams() visu: VisuParams = VisuParams() - - def verify_params(self): - if not self.general.log_to_disk: - assert self.trainer.profiler != "advanced", "Should not be advanced if not logging to disk" - assert self.cb_checkpoint.active == False, "Should be False if not logging to disk" - - if self.loss.trav_cross_entropy: - 
self.model.simple_mlp_cfg.hidden_sizes[-1] = 2 - self.model.double_mlp_cfg.hidden_sizes[-1] = 2 - self.model.simple_gcn_cfg.hidden_sizes[-1] = 2 diff --git a/wild_visual_navigation/cfg/global_params.py b/wild_visual_navigation/cfg/global_params.py new file mode 100644 index 00000000..159edd1f --- /dev/null +++ b/wild_visual_navigation/cfg/global_params.py @@ -0,0 +1,21 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from dataclasses import dataclass + + +@dataclass +class GlobalEnvironmentParams: + perugia_root: str + results: str + + +def get_global_env_params(name): + configs = { + "default": GlobalEnvironmentParams(perugia_root="TBD", results="results"), + "ge76": GlobalEnvironmentParams(perugia_root="TBD", results="results"), + "jetson": GlobalEnvironmentParams(perugia_root="TBD", results="results"), + } + return configs[name] diff --git a/wild_visual_navigation/cfg/ros_params.py b/wild_visual_navigation/cfg/ros_params.py new file mode 100644 index 00000000..64a057d3 --- /dev/null +++ b/wild_visual_navigation/cfg/ros_params.py @@ -0,0 +1,94 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from dataclasses import dataclass +from typing import Dict +from typing import Any + + +@dataclass +class RosLearningNodeParams: + # Input topics + camera_topics: Dict[str, Any] + robot_state_topic: str + desired_twist_topic: str + + # Relevant frames + fixed_frame: str + base_frame: str + footprint_frame: str + + # Robot size + robot_length: float + robot_width: float + robot_height: float + + # Traversability estimation params + traversability_radius: float # meters + image_graph_dist_thr: float # meters + supervision_graph_dist_thr: float # meters + confidence_std_factor: float + min_samples_for_training: int + network_input_image_height: int + network_input_image_width: int + vis_node_index: int + + # Supervision Generator + untraversable_thr: float + + mission_name: str + mission_timestamp: bool + + # Threads + image_callback_rate: float # hertz + supervision_callback_rate: float # hertz + learning_thread_rate: float # hertz + logging_thread_rate: float # hertz + load_save_checkpoint_rate: float # hert + + # Runtime options + device: str + mode: Any # check out comments in the class WVNMode + colormap: str + + print_image_callback_time: bool + print_supervision_callback_time: bool + log_time: bool + log_confidence: bool + verbose: bool + + extraction_store_folder: str + + +@dataclass +class RosFeatureExtractorNodeParams: + # Input topics + camera_topics: Dict[str, Any] + + # FeatureExtractor + network_input_image_height: int # 448 + network_input_image_width: int # 448 + segmentation_type: str + feature_type: str + dino_patch_size: int # 8 or 16; 8 is finer + dino_backbone: str # vit_small, vit_base + slic_num_components: int + + # ConfidenceGenerator + confidence_std_factor: float + + # Output setting + prediction_per_pixel: bool + + # Runtime options + mode: Any # check out comments in the class WVNMode + status_thread_rate: float # hertz + device: str + log_confidence: bool + verbose: bool + + # Threads + image_callback_rate: float # hertz + load_save_checkpoint_rate: float # hertz diff --git a/wild_visual_navigation/feature_extractor/__init__.py b/wild_visual_navigation/feature_extractor/__init__.py index 
c986125c..ba75ba13 100644 --- a/wild_visual_navigation/feature_extractor/__init__.py +++ b/wild_visual_navigation/feature_extractor/__init__.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from .dino_interface import DinoInterface, run_dino_interfacer from .torchvision_interface import TorchVisionInterface diff --git a/wild_visual_navigation/feature_extractor/dino_interface.py b/wild_visual_navigation/feature_extractor/dino_interface.py index 35f00ab4..816ad25e 100644 --- a/wild_visual_navigation/feature_extractor/dino_interface.py +++ b/wild_visual_navigation/feature_extractor/dino_interface.py @@ -1,256 +1,201 @@ -from wild_visual_navigation import WVN_ROOT_DIR -import os +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from os.path import join import torch.nn.functional as F import torch -from omegaconf import DictConfig from torchvision import transforms as T -from stego.src.train_segmentation import DinoFeaturizer +from omegaconf import OmegaConf + +from stego.backbones.backbone import get_backbone class DinoInterface: def __init__( self, device: str, + backbone: str = "dino", input_size: int = 448, - input_interp: str = "bilinear", - model_type: str = "vit_small", + backbone_type: str = "vit_small", patch_size: int = 8, + projection_type: str = None, # nonlinear or None + dropout_p: float = 0, # True or False + pretrained_weights: str = None, + cfg: OmegaConf = OmegaConf.create({}), ): - self.dim = 90 - self.cfg = DictConfig( - { - "dino_patch_size": patch_size, - "dino_feat_type": "feat", - "model_type": model_type, - "projection_type": "nonlinear", - "pretrained_weights": None, - "dropout": True, - } - ) - - # Pretrained weights - if self.cfg.pretrained_weights is None: - self.cfg.pretrained_weights = self.download_pretrained_model(self.cfg) + # Load config + if cfg.is_empty(): + self._cfg = OmegaConf.create( + { + "backbone": backbone, + "backbone_type": backbone_type, + "input_size": input_size, + "patch_size": patch_size, + "projection_type": projection_type, + "dropout_p": dropout_p, + "pretrained_weights": pretrained_weights, + } + ) + else: + self._cfg = cfg # Initialize DINO - self.model = DinoFeaturizer(self.dim, self.cfg) + self._model = get_backbone(self._cfg) # Send to device - self.model.to(device) - self.device = device - - self._input_size = input_size - self._input_interp = input_interp - - # Interpolation type - if self._input_interp == "bilinear": - interp = T.InterpolationMode.BILINEAR - elif self._input_interp == "nearest": - interp = T.InterpolationMode.NEAREST - - # Transformation for testing - self.transform = T.Compose( - [ - T.Resize(self._input_size, interp), - T.CenterCrop(self._input_size), - T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), - ] - ) + self._model.to(device) + self._device = device - self.crop = T.Compose( + # Other + normalization = T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + self._transform = T.Compose( [ - T.Resize(self._input_size, interp), - T.CenterCrop(self._input_size), + T.Resize(input_size, T.InterpolationMode.NEAREST), + T.CenterCrop(input_size), + normalization, ] ) - # Just normalization - self.norm = T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) - def change_device(self, device): """Changes the device 
of all the class members Args: device (str): new device """ - self.model.to(device) - self.device = device - - def download_pretrained_model(self, cfg: DictConfig): - """Loads model. - - Returns: - model (stego.src.train_segmentation.DinoFeaturizer): Pretrained model - """ - - arch = cfg.model_type - patch_size = cfg.dino_patch_size - - if arch == "vit_small" and patch_size == 16: - model = "dino_deitsmall16_pretrain" - elif arch == "vit_small" and patch_size == 8: - model = "dino_deitsmall8_300ep_pretrain" - elif arch == "vit_base" and patch_size == 16: - model = "dino_vitbase16_pretrain" - elif arch == "vit_base" and patch_size == 8: - model = "dino_vitbase8_pretrain" - elif arch == "resnet": - model = "dino_resnet50_pretrain" - else: - raise ValueError("Unknown arch and patch size") - - url = f"{model}/{model}.pth" - - # Download model - model_path = join(WVN_ROOT_DIR, "assets", "dino") - if not os.path.exists(model_path): - os.makedirs(model_path, exist_ok=True) - state_dict = torch.hub.load_state_dict_from_url( - url="https://dl.fbaipublicfiles.com/dino/" + url, model_dir=model_path - ) - - return join(model_path, f"{model}.pth") - - def get_feature_dim(self): - return self.dim + self._model.to(device) + self._device = device @torch.no_grad() - def inference(self, img: torch.tensor, interpolate: bool = False): + def inference(self, img: torch.tensor): """Performance inference using DINO Args: - img (torch.tensor, dtype=type.torch.float32, shape=(BS,3,H.W)): Input image + img (torch.tensor, dtype=type.torch.float32, shape=(B,3,H.W)): Input image Returns: - features (torch.tensor, dtype=torch.float32, shape=(BS,D,H,W)): per-pixel D-dimensional features + features (torch.tensor, dtype=torch.float32, shape=(B,D,H,W)): per-pixel D-dimensional features """ - # assert 1 == img.shape[0] - img = self.norm(img).to(self.device) + + # Resize image and normalize + resized_img = self._transform(img).to(self._device) # Extract features - features = self.model(img)[1] + features = self._model(resized_img) # resize and interpolate features B, D, H, W = img.shape - new_size = (H, H) - pad = int((W - H) / 2) - features = F.interpolate(features, new_size, mode="bilinear", align_corners=True) - features = F.pad(features, pad=[pad, pad, 0, 0]) - + new_features_size = (H, H) + # pad = int((W - H) / 2) + features = F.interpolate(features, new_features_size, mode="bilinear", align_corners=True) + # features = F.pad(features, pad=[pad, pad, 0, 0]) return features @property def input_size(self): - return self._input_size + return self._cfg.input_size @property - def input_interpolation(self): - return self._input_interp + def backbone(self): + return self._cfg.backbone @property - def model_type(self): - return self.cfg.model_type + def backbone_type(self): + return self._cfg.backbone_type @property def vit_patch_size(self): - return self.cfg.dino_patch_size + return self._cfg.patch_size def run_dino_interfacer(): """Performance inference using stego and stores result as an image.""" - from wild_visual_navigation.utils import Timer + from pytictac import Timer from wild_visual_navigation.visu import get_img_from_fig + from wild_visual_navigation.utils.testing import load_test_image, make_results_folder import matplotlib.pyplot as plt - from stego.src import remove_axes - import cv2 + from stego.utils import remove_axes # Create test directory - os.makedirs(join(WVN_ROOT_DIR, "results", "test_dino_interfacer"), exist_ok=True) + outpath = make_results_folder("test_dino_interfacer") # Inference model device = 
"cuda" if torch.cuda.is_available() else "cpu" - p = join(WVN_ROOT_DIR, "assets/images/forest_clean.png") - np_img = cv2.imread(p) - img = torch.from_numpy(cv2.cvtColor(np_img, cv2.COLOR_BGR2RGB)).to(device) - img = img.permute(2, 0, 1) - img = (img.type(torch.float32) / 255)[None] + img = load_test_image().to(device) + img = F.interpolate(img, scale_factor=0.25) plot = False save_features = True # Settings size = 448 - interp = "bilinear" model = "vit_small" patch = 8 + backbone = "dinov2" # Inference with DINO # Create DINO - di = DinoInterface(device=device, input_size=size, input_interp=interp, model_type=model, patch_size=patch) - - with Timer( - f"DINO, input_size, {di.input_size}, interp, {di.input_interpolation}, model, {di.model_type}, patch_size, {di.vit_patch_size}" - ): - try: - feat_dino = di.inference(di.transform(img), interpolate=False) - - if save_features: - for i in range(90): - fig = plt.figure(frameon=False) - fig.set_size_inches(2, 2) - ax = plt.Axes(fig, [0.0, 0.0, 1.0, 1.0]) - ax.set_axis_off() - fig.add_axes(ax) - ax.imshow(feat_dino[0][i].cpu(), cmap=plt.colormaps.get("inferno")) - - # Store results to test directory - out_img = get_img_from_fig(fig) - out_img.save( - join( - WVN_ROOT_DIR, - "results", - "test_dino_interfacer", - f"forest_clean_dino_feat{i:02}_{di.input_size}_{di.input_interpolation}_{di.model_type}_{di.vit_patch_size}.png", - ) - ) - plt.close("all") - - if plot: - # Plot result as in colab - fig, ax = plt.subplots(10, 11, figsize=(1 * 11, 1 * 11)) - - for i in range(10): - for j in range(11): - if i == 0 and j == 0: - continue - - elif (i == 0 and j != 0) or (i != 0 and j == 0): - ax[i][j].imshow(img.permute(0, 2, 3, 1)[0].cpu()) - ax[i][j].set_title("Image") - else: - n = (i - 1) * 10 + (j - 1) - if n >= di.get_feature_dim(): - break - ax[i][j].imshow(feat_dino[0][n].cpu(), cmap=plt.colormaps.get("inferno")) - ax[i][j].set_title("Features [0]") - remove_axes(ax) - plt.tight_layout() - - # Store results to test directory - out_img = get_img_from_fig(fig) - out_img.save( - join( - WVN_ROOT_DIR, - "results", - "test_dino_interfacer", - f"forest_clean_dino_{di.input_size}_{di.input_interpolation}_{di.model_type}_{di.vit_patch_size}.png", - ) + di = DinoInterface( + device=device, + backbone=backbone, + input_size=size, + backbone_type=model, + patch_size=patch, + ) + + with Timer(f"DINO, input_size, {di.input_size}, model, {di.backbone_type}, patch_size, {di.vit_patch_size}"): + feat_dino = di.inference(img) + + if save_features: + for i in range(90): + fig = plt.figure(frameon=False) + fig.set_size_inches(2, 2) + ax = plt.Axes(fig, [0.0, 0.0, 1.0, 1.0]) + ax.set_axis_off() + fig.add_axes(ax) + ax.imshow(feat_dino[0][i].cpu(), cmap=plt.colormaps.get("inferno")) + + # Store results to test directory + out_img = get_img_from_fig(fig) + out_img.save( + join( + outpath, + f"forest_clean_dino_feat{i:02}_{di.input_size}_{di.backbone_type}_{di.vit_patch_size}.png", ) - plt.close("all") - - except Exception as e: - pass + ) + plt.close("all") + + if plot: + # Plot result as in colab + fig, ax = plt.subplots(10, 11, figsize=(1 * 11, 1 * 11)) + + for i in range(10): + for j in range(11): + if i == 0 and j == 0: + continue + + elif (i == 0 and j != 0) or (i != 0 and j == 0): + ax[i][j].imshow(img.permute(0, 2, 3, 1)[0].cpu()) + ax[i][j].set_title("Image") + else: + n = (i - 1) * 10 + (j - 1) + if n >= di.get_feature_dim(): + break + ax[i][j].imshow(feat_dino[0][n].cpu(), cmap=plt.colormaps.get("inferno")) + ax[i][j].set_title("Features [0]") + remove_axes(ax) 
+ plt.tight_layout() + + # Store results to test directory + out_img = get_img_from_fig(fig) + out_img.save( + join( + outpath, + f"forest_clean_{di.backbone}_{di.input_size}_{di.backbone_type}_{di.vit_patch_size}.png", + ) + ) + plt.close("all") if __name__ == "__main__": diff --git a/wild_visual_navigation/feature_extractor/dino_trt_interface.py b/wild_visual_navigation/feature_extractor/dino_trt_interface.py index b71dcbc4..adfcbc09 100644 --- a/wild_visual_navigation/feature_extractor/dino_trt_interface.py +++ b/wild_visual_navigation/feature_extractor/dino_trt_interface.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from wild_visual_navigation import WVN_ROOT_DIR from os.path import join from omegaconf import DictConfig @@ -55,7 +60,9 @@ def __call__(self, x: torch.tensor, batch_size=1): class DinoTrtInterface: def __init__( - self, trt_model_path: str = os.path.join(WVN_ROOT_DIR, "assets/dino/dino_exported.trt"), device: str = "cuda" + self, + trt_model_path: str = os.path.join(WVN_ROOT_DIR, "assets/dino/dino_exported.trt"), + device: str = "cuda", ): self.device = device self.dim = 90 @@ -122,24 +129,22 @@ def run_dino_trt_interfacer(): """Performance inference using stego and stores result as an image.""" from wild_visual_navigation.visu import get_img_from_fig + from wild_visual_navigation.testing import load_test_image, get_dino_transform + from wild_visual_navigation.utils.testing import make_results_folder import matplotlib.pyplot as plt from stego.src import remove_axes - import cv2 # Create test directory - os.makedirs(join(WVN_ROOT_DIR, "results", "test_dino_trt_interfacer"), exist_ok=True) + outpath = make_results_folder("test_dino_trt_interfacer") # Inference model device = "cuda" if torch.cuda.is_available() else "cpu" di = DinoTrtInterface(device=device) - p = join(WVN_ROOT_DIR, "assets/images/forest_clean.png") - np_img = cv2.imread(p) - img = torch.from_numpy(cv2.cvtColor(np_img, cv2.COLOR_BGR2RGB)).to(device) - img = img.permute(2, 0, 1) - img = (img.type(torch.float32) / 255)[None] + img = load_test_image().to(device) + transform = get_dino_transform() # Inference with DINO - feat_dino = di.inference(di.transform(img), interpolate=False) + feat_dino = di.inference(transform(img), interpolate=False) # Fix size of DINO features to match input image's size B, D, H, W = img.shape @@ -169,7 +174,7 @@ def run_dino_trt_interfacer(): # Store results to test directory img = get_img_from_fig(fig) - img.save(join(WVN_ROOT_DIR, "results", "test_dino_trt_interfacer", "forest_clean_dino.png")) + img.save(join(outpath, "forest_clean_dino.png")) if __name__ == "__main__": diff --git a/wild_visual_navigation/feature_extractor/feature_extractor.py b/wild_visual_navigation/feature_extractor/feature_extractor.py index 74c42801..053a2e82 100644 --- a/wild_visual_navigation/feature_extractor/feature_extractor.py +++ b/wild_visual_navigation/feature_extractor/feature_extractor.py @@ -1,23 +1,29 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# from wild_visual_navigation.feature_extractor import ( StegoInterface, DinoInterface, SegmentExtractor, TorchVisionInterface, ) -from wild_visual_navigation.utils import Timer -import skimage import torch import numpy as np import kornia from kornia.feature import DenseSIFTDescriptor from kornia.contrib import extract_tensor_patches, combine_tensor_patches -from torchvision import transforms as T -from PIL import Image, ImageDraw class FeatureExtractor: def __init__( - self, device: str, segmentation_type: str = "slic", feature_type: str = "dino", input_size: int = 448, **kwargs + self, + device: str, + segmentation_type: str = "slic", + feature_type: str = "dino", + input_size: int = 448, + **kwargs, ): """Feature extraction from image @@ -30,37 +36,79 @@ def __init__( self._segmentation_type = segmentation_type self._feature_type = feature_type self._input_size = input_size + # Prepare segment extractor self.segment_extractor = SegmentExtractor().to(self._device) # Prepare extractor depending on the type if self._feature_type == "stego": self._feature_dim = 90 - self.extractor = StegoInterface(device=device, input_size=input_size) - elif self._feature_type == "dino": - self._feature_dim = 90 + self._extractor = StegoInterface( + device=device, + input_size=input_size, + n_image_clusters=kwargs.get("n_image_clusters", 20), + run_clustering=kwargs.get("run_clustering", True), + run_crf=kwargs.get("run_crf", False), + ) + + elif "dino" in self._feature_type: + self._feature_dim = 384 + self._extractor = DinoInterface( + device=device, + input_size=input_size, + patch_size=kwargs.get("patch_size", 8), + backbone=kwargs.get("backbone", self._feature_type), + backbone_type=kwargs.get("backbone_type", "vit_base"), + ) - self.extractor = DinoInterface(device=device, input_size=input_size, patch_size=kwargs.get("patch_size", 8)) elif self._feature_type == "sift": self._feature_dim = 128 - self.extractor = DenseSIFTDescriptor().to(device) + self._extractor = DenseSIFTDescriptor().to(device) + elif self._feature_type == "torchvision": self._extractor = TorchVisionInterface( device=device, model_type=kwargs["model_type"], input_size=input_size ) + elif self._feature_type == "histogram": self._feature_dim = 90 + + elif self._feature_type == "none": + pass + else: raise f"Extractor[{self._feature_type}] not supported!" 
+ # Segmentation if self.segmentation_type == "slic": from fast_slic import Slic self.slic = Slic( - num_components=kwargs.get("slic_num_components", 100), compactness=kwargs.get("slic_compactness", 10) + num_components=kwargs.get("slic_num_components", 100), + compactness=kwargs.get("slic_compactness", 10), ) + elif self.segmentation_type == "random": + pass + def extract(self, img, **kwargs): + if self._segmentation_type == "random": + dense_feat = self.compute_features(img, None, None, **kwargs) + + H, W = img.shape[2:] + nr = kwargs.get("n_random_pixels", 100) + + seg = torch.full((H * W,), -1, dtype=torch.long, device=self._device) + indices = torch.randperm(H * W, device=self._device)[:nr] + seg[indices] = torch.arange(0, nr, device=self._device) + seg = seg.reshape(H, W) + feat = dense_feat[0].reshape(dense_feat.shape[1], H * W)[:, indices].T + + if kwargs.get("return_dense_features", False): + return None, feat, seg, None, dense_feat + + return None, feat, seg, None, None + # Compute segments, their centers, and edges connecting them (graph structure) # with Timer("feature_extractor - compute_segments"): edges, seg, center = self.compute_segments(img, **kwargs) @@ -69,10 +117,14 @@ def extract(self, img, **kwargs): # with Timer("feature_extractor - compute_features"): dense_feat = self.compute_features(img, seg, center, **kwargs) + # with Timer("feature_extractor - compute_features"): # Sparsify features to match the centers if required feat = self.sparsify_features(dense_feat, seg) - return edges, feat, seg, center + if kwargs.get("return_dense_features", False): + return edges, feat, seg, center, dense_feat + + return edges, feat, seg, center, None @property def feature_type(self): @@ -93,7 +145,7 @@ def change_device(self, device): device (str): new device """ self._device = device - self.extractor.change_device(device) + self._extractor.change_device(device) def compute_segments(self, img: torch.tensor, **kwargs): if self._segmentation_type == "none" or self._segmentation_type is None: @@ -107,17 +159,21 @@ def compute_segments(self, img: torch.tensor, **kwargs): elif self._segmentation_type == "stego": seg = self.segment_stego(img, **kwargs) + + elif self._segmentation_type == "random": + seg = self.segment_random(img, **kwargs) + else: raise f"segmentation_type [{self._segmentation_type}] not supported" # Compute edges and centers if self._segmentation_type != "none" and self._segmentation_type is not None: # Extract adjacency_list based on segments - edges = self.segment_extractor.adjacency_list(seg[None, None]) + edges = self.segment_extractor.adjacency_list(seg) # Extract centers - centers = self.segment_extractor.centers(seg[None, None]) + centers = self.segment_extractor.centers(seg) - return edges.T, seg, centers + return edges.T, seg[0, 0], centers def segment_pixelwise(self, img, **kwargs): # Generate pixel-wise segmentation @@ -136,7 +192,7 @@ def segment_pixelwise(self, img, **kwargs): ver_edges = torch.cat((seg[:-1, :].reshape(-1, 1), seg[1:, :].reshape(-1, 1)), dim=1) edges = torch.cat((hor_edges, ver_edges), dim=0) - return edges, seg, centers + return edges, seg[None, None], centers def segment_grid(self, img, **kwargs): cell_size = kwargs.get("cell_size", 32) @@ -151,29 +207,42 @@ def segment_grid(self, img, **kwargs): for i in range(patches.shape[1]): patches[:, i, :, :, :] = i - combine_patch_size = (int(H / cell_size), int(W / cell_size)) + # combine_patch_size = (int(H / cell_size), int(W / cell_size)) seg = combine_tensor_patches( - patches=patches, 
original_size=(H, W), window_size=combine_patch_size, stride=combine_patch_size + patches=patches, + original_size=(H, W), + window_size=patch_size, + stride=patch_size, ) - return seg[0, 0].to(self._device) + return seg.to(self._device) def segment_slic(self, img, **kwargs): # Get slic clusters img_np = kornia.utils.tensor_to_image(img) - seg = self.slic.iterate(np.uint8(np.ascontiguousarray(img_np) * 255)) + seg = self.slic.iterate(np.uint8(np.ascontiguousarray(img_np) * 255))[None, None] return torch.from_numpy(seg).to(self._device).type(torch.long) + def segment_random(self, img, **kwargs): + # Randomly select + H, W = img.shape[2:] + nr = kwargs.get("n_random_pixels", 100) + seg = torch.full((H * W,), -1, dtype=torch.long, device=self._device) + indices = torch.randperm(H * W, device=self._device)[:nr] + seg[indices] = torch.arange(0, nr, device=self._device) + seg = seg.reshape(H, W)[None, None] + return seg + def segment_stego(self, img, **kwargs): # Prepare input image img_internal = img.clone() - self.extractor.inference_crf(img_internal) - seg = torch.from_numpy(self.extractor.cluster_segments).to(self._device) + self._extractor.inference(img_internal) + seg = self._extractor.cluster_segments.to(self._device) + # seg = torch.from_numpy(self._extractor.cluster_segments).to(self._device) # Change the segment indices by numbers from 0 to N for i, k in enumerate(seg.unique()): seg[seg == k.item()] = i - return seg def compute_features(self, img: torch.tensor, seg: torch.tensor, center: torch.tensor, **kwargs): @@ -183,7 +252,7 @@ def compute_features(self, img: torch.tensor, seg: torch.tensor, center: torch.t elif self._feature_type == "sift": feat = self.compute_sift(img, seg, center, **kwargs) - elif self._feature_type == "dino": + elif "dino" in self._feature_type: feat = self.compute_dino(img, seg, center, **kwargs) elif self._feature_type == "stego": @@ -191,7 +260,8 @@ def compute_features(self, img: torch.tensor, seg: torch.tensor, center: torch.t elif self._feature_type == "torchvision": feat = self.compute_torchvision(img, seg, center, **kwargs) - + elif self._feature_type == "none": + feat = None else: raise f"segmentation_type [{self._segmentation_type}] not supported" @@ -204,18 +274,18 @@ def compute_histogram(self, img: torch.tensor, seg: torch.tensor, **kwargs): def compute_sift(self, img: torch.tensor, seg: torch.tensor, center: torch.tensor, **kwargs): B, C, H, W = img.shape if C == 3: - feat_r = self.extractor(img[:, 0, :, :][None]) - feat_g = self.extractor(img[:, 1, :, :][None]) - feat_b = self.extractor(img[:, 2, :, :][None]) - features = torch.cat([feat_r, feat_r, feat_b], dim=1) + feat_r = self._extractor(img[:, 0, :, :][None]) + feat_g = self._extractor(img[:, 1, :, :][None]) + feat_b = self._extractor(img[:, 2, :, :][None]) + features = torch.cat([feat_r, feat_g, feat_b], dim=1) else: - features = self.extractor(img) + features = self._extractor(img) return features @torch.no_grad() def compute_dino(self, img: torch.tensor, seg: torch.tensor, center: torch.tensor, **kwargs): img_internal = img.clone() - features = self.extractor.inference(img_internal) + features = self._extractor.inference(img_internal) return features @torch.no_grad() @@ -226,7 +296,11 @@ def compute_torchvision(self, img: torch.tensor, seg: torch.tensor, center: torc @torch.no_grad() def compute_stego(self, img: torch.tensor, seg: torch.tensor, center: torch.tensor, **kwargs): - return self.extractor.features + try: + return self._extractor.features + except Exception: + 
self.segment_stego(img, **kwargs) + return self._extractor.features def sparsify_features(self, dense_features: torch.tensor, seg: torch.tensor, cumsum_trick=False): if self._feature_type not in ["histogram"] and self._segmentation_type not in ["none"]: @@ -239,7 +313,8 @@ def sparsify_features(self, dense_features: torch.tensor, seg: torch.tensor, cum segs = [ torch.nn.functional.interpolate( - seg[None, None, :, :].type(torch.float32), scale_factor=(scale_x, scale_y) + seg[None, None, :, :].type(torch.float32), + scale_factor=(scale_x, scale_y), )[0, 0].type(torch.long) for scale_x, scale_y in zip(scales_x, scales_y) ] @@ -252,6 +327,11 @@ def sparsify_features(self, dense_features: torch.tensor, seg: torch.tensor, cum # Iterate over each scale for dense_feature, seg_scaled in zip(dense_features.values(), segs): m = seg_scaled == i + prev_scale_x = 1.0 + prev_scale_y = 1.0 + prev_x = 1.0 + prev_y = 1.0 + # When downscaling the mask it becomes 0 therfore calculate x,y # Based on the previous scale if m.sum() == 0: diff --git a/wild_visual_navigation/feature_extractor/segment_extractor.py b/wild_visual_navigation/feature_extractor/segment_extractor.py index 1aec152b..7095bfdb 100644 --- a/wild_visual_navigation/feature_extractor/segment_extractor.py +++ b/wild_visual_navigation/feature_extractor/segment_extractor.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# # author: Jonas Frey import torch @@ -41,7 +46,7 @@ def adjacency_list(self, seg: torch.tensor): Returns: adjacency_list (torch.Tensor, dtype=torch.long, shape=(N, 2): Adjacency list of undirected graph """ - assert seg.shape[0] == 1 and len(seg.shape) == 4 + assert seg.shape[0] == 1 and len(seg.shape) == 4, f"{seg.shape}" res = self.f1(seg.type(torch.float32)) boundary_mask = (res != 0)[0, :, 2:-2, 2:-2] diff --git a/wild_visual_navigation/feature_extractor/stego_interface.py b/wild_visual_navigation/feature_extractor/stego_interface.py index 6bd9b0b2..d399603a 100644 --- a/wild_visual_navigation/feature_extractor/stego_interface.py +++ b/wild_visual_navigation/feature_extractor/stego_interface.py @@ -1,48 +1,62 @@ -from wild_visual_navigation import WVN_ROOT_DIR -import os +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# from os.path import join import torch.nn.functional as F import torch -import wget from torchvision import transforms as T -from stego.src import LitUnsupervisedSegmenter +from omegaconf import OmegaConf -# from stego.src import get_transform -from stego.src import dense_crf +from pytictac import Timer +from stego import STEGO_ROOT_DIR +from stego.stego import Stego +from stego.data import create_cityscapes_colormap class StegoInterface: - def __init__(self, device: str, input_size: int = 448, input_interp: str = "bilinear"): - self.model = self.load() - self.model.to(device) - self.device = device - self._input_size = input_size - self._input_interp = input_interp - - if self._input_interp == "bilinear": - interp = T.InterpolationMode.BILINEAR - elif self._input_interp == "nearest": - interp = T.InterpolationMode.NEAREST - - # Transformation for testing - self.transform = T.Compose( + def __init__( + self, + device: str, + input_size: int = 448, + model_path: str = f"{STEGO_ROOT_DIR}/models/stego_cocostuff27_vit_base_5_cluster_linear_fine_tuning.ckpt", + n_image_clusters: int = 40, + run_crf: bool = True, + run_clustering: bool = False, + cfg: OmegaConf = OmegaConf.create({}), + ): + # Load config + if cfg.is_empty(): + self._cfg = OmegaConf.create( + { + "model_path": model_path, + "input_size": input_size, + "run_crf": run_crf, + "run_clustering": run_clustering, + "n_image_clusters": n_image_clusters, + } + ) + else: + self._cfg = cfg + + self._model = Stego.load_from_checkpoint(self._cfg.model_path, n_image_clusters=self._cfg.n_image_clusters) + self._model.eval().to(device) + self._device = device + + # Colormap + self._cmap = create_cityscapes_colormap() + + # Other + normalization = T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) + self._transform = T.Compose( [ - T.Resize(self._input_size, interp), - T.CenterCrop(self._input_size), - T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]), + T.Resize(input_size, T.InterpolationMode.NEAREST), + T.CenterCrop(input_size), + normalization, ] ) - self.crop = T.Compose( - [ - T.Resize(self._input_size, interp), - T.CenterCrop(self._input_size), - ] - ) - - # Just normalization - self.norm = T.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]) - # Internal variables to access internal data self._features = None self._segments = None @@ -53,25 +67,8 @@ def change_device(self, device): Args: device (str): new device """ - self.model.to(device) - self.device = device - - def load(self): - """Loads model. 
- - Returns: - model (stego.src.train_segmentation.LitUnsupervisedSegmenter): Pretrained model - """ - model_name = "cocostuff27_vit_base_5.ckpt" - model_path = join(WVN_ROOT_DIR, "assets", "stego", model_name) - if not os.path.exists(model_path): - os.makedirs(join(WVN_ROOT_DIR, "assets", "stego"), exist_ok=True) - saved_model_url_root = "https://marhamilresearch4.blob.core.windows.net/stego-public/saved_models/" - saved_model_name = model_name - wget.download(saved_model_url_root + saved_model_name, model_path) - - model = LitUnsupervisedSegmenter.load_from_checkpoint(model_path) - return model + self._model.to(device) + self._device = device @torch.no_grad() def inference(self, img: torch.tensor): @@ -83,47 +80,47 @@ def inference(self, img: torch.tensor): linear_probs (torch.tensor, dtype=torch.float32, shape=(BS,C,H,W)): Linear prediction cluster_probs (torch.tensor, dtype=torch.float32, shape=(BS,C,H,W)): Cluster prediction """ - assert 1 == img.shape[0] - - img = self.norm(img).to(self.device) - code1 = self.model(img) - code2 = self.model(img.flip(dims=[3])) - code = (code1 + code2.flip(dims=[3])) / 2 - code = F.interpolate(code, img.shape[-2:], mode="bilinear", align_corners=False) - linear_probs = torch.log_softmax(self.model.linear_probe(code), dim=1) - cluster_probs = self.model.cluster_probe(code, 2, log_probs=True) - - # Save segments - self._code = code - self._segments = linear_probs - - return linear_probs, cluster_probs - - @torch.no_grad() - def inference_crf(self, img: torch.tensor): - """ - Args: - img (torch.tensor, dtype=type.torch.float32): Input image - - Returns: - linear_pred (torch.tensor, dtype=torch.int64): Linear prediction - cluster_pred (torch.tensor, dtype=torch.int64): Cluster prediction - """ + # assert 1 == img.shape[0] + + # Resize image and normalize + # with Timer("input normalization"): + resized_img = self._transform(img).to(self._device) + + # Run STEGO + # with Timer("compute code"): + self._code = self._model.get_code(resized_img) + + # with Timer("compute postprocess"): + self._cluster_pred, self._linear_pred = self._model.postprocess( + code=self._code, + img=resized_img, + use_crf_cluster=self._cfg.run_crf, + use_crf_linear=self._cfg.run_crf, + image_clustering=self._cfg.run_clustering, + ) - linear_probs, cluster_probs = self.inference(img) - single_img = img[0].cpu() - self._linear_pred = dense_crf(single_img, linear_probs[0]).argmax(0) - self._cluster_pred = dense_crf(single_img, cluster_probs[0]).argmax(0) + # resize and interpolate features + # with Timer("interpolate output"): + B, D, H, W = img.shape + new_features_size = (H, H) + # pad = int((W - H) / 2) + self._code = F.interpolate(self._code, new_features_size, mode="bilinear", align_corners=True) + self._cluster_pred = F.interpolate(self._cluster_pred[None].float(), new_features_size, mode="nearest").int() + self._linear_pred = F.interpolate(self._linear_pred[None].float(), new_features_size, mode="nearest").int() return self._linear_pred, self._cluster_pred @property - def input_size(self): - return self._input_size + def model(self): + return self._model + + @property + def cmap(self): + return self._cmap @property - def input_interpolation(self): - return self._input_interp + def input_size(self): + return self._cfg.input_size @property def linear_segments(self): @@ -140,38 +137,39 @@ def features(self): def run_stego_interfacer(): """Performance inference using stego and stores result as an image.""" - - from wild_visual_navigation.utils import Timer from 
wild_visual_navigation.visu import get_img_from_fig + from wild_visual_navigation.utils.testing import load_test_image, make_results_folder + from stego.utils import remove_axes import matplotlib.pyplot as plt - from stego.src import unnorm, remove_axes - import cv2 # Create test directory - os.makedirs(join(WVN_ROOT_DIR, "results", "test_stego_interfacer"), exist_ok=True) + outpath = make_results_folder("test_stego_interfacer") # Inference model device = "cuda" if torch.cuda.is_available() else "cpu" - si = StegoInterface(device=device, input_size=448, input_interp="bilinear") + si = StegoInterface( + device=device, + input_size=448, + run_crf=False, + run_clustering=True, + n_image_clusters=20, + ) - p = join(WVN_ROOT_DIR, "assets/images/forest_clean.png") - np_img = cv2.imread(p) - img = torch.from_numpy(cv2.cvtColor(np_img, cv2.COLOR_BGR2RGB)).to(device) - img = img.permute(2, 0, 1) - img = (img.type(torch.float32) / 255)[None] + img = load_test_image().to(device) + img = F.interpolate(img, scale_factor=0.5) - with Timer(f"Stego (input {si.input_size}, interp: {si.input_interpolation})"): - linear_pred, cluster_pred = si.inference_crf(si.crop(img)) + with Timer(f"Stego input {si.input_size}\n"): + linear_pred, cluster_pred = si.inference(img) # Plot result as in colab fig, ax = plt.subplots(1, 3, figsize=(5 * 3, 5)) - ax[0].imshow(si.crop(img).permute(0, 2, 3, 1)[0].cpu()) + ax[0].imshow(img[0].permute(1, 2, 0).cpu().numpy()) ax[0].set_title("Image") - ax[1].imshow(si.model.label_cmap[cluster_pred]) + ax[1].imshow(si.cmap[cluster_pred[0, 0].cpu() % si.cmap.shape[0]]) ax[1].set_title("Cluster Predictions") - ax[2].imshow(si.model.label_cmap[linear_pred]) + ax[2].imshow(si.cmap[linear_pred[0, 0].cpu()]) ax[2].set_title("Linear Probe Predictions") remove_axes(ax) @@ -179,10 +177,8 @@ def run_stego_interfacer(): img = get_img_from_fig(fig) img.save( join( - WVN_ROOT_DIR, - "results", - "test_stego_interfacer", - f"forest_clean_stego_{si.input_size}_{si.input_interpolation}.png", + outpath, + f"forest_clean_stego_{si.input_size}.png", ) ) diff --git a/wild_visual_navigation/feature_extractor/torchvision_interface.py b/wild_visual_navigation/feature_extractor/torchvision_interface.py index b9d2e683..bccf80cd 100644 --- a/wild_visual_navigation/feature_extractor/torchvision_interface.py +++ b/wild_visual_navigation/feature_extractor/torchvision_interface.py @@ -1,9 +1,12 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
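The STEGO wrapper refactored above follows the same pattern; a condensed sketch, assuming the fine-tuned checkpoint at the default model_path is available:

import torch
from wild_visual_navigation.feature_extractor import StegoInterface

device = "cuda" if torch.cuda.is_available() else "cpu"
si = StegoInterface(
    device=device,
    input_size=448,
    run_crf=False,
    run_clustering=True,
    n_image_clusters=20,
)

img = torch.rand(1, 3, 448, 448, device=device)
linear_pred, cluster_pred = si.inference(img)  # int predictions, each (1, 1, H, H) for a single image
colored = si.cmap[cluster_pred[0, 0].cpu() % si.cmap.shape[0]]  # colorize as in the demo above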
+# from wild_visual_navigation import WVN_ROOT_DIR -import os from os.path import join from torch import nn -import torch.nn.functional as F import torch import torchvision.models as models @@ -120,31 +123,19 @@ def inference(self, img: torch.tensor, interpolate: bool = False): def run_torch_vision_model_interfacer(): """Performance inference using stego and stores result as an image.""" - - from wild_visual_navigation.utils import Timer - from wild_visual_navigation.visu import get_img_from_fig - import matplotlib.pyplot as plt - from stego.src import remove_axes - import cv2 - - # Create test directory - # os.makedirs(join(WVN_ROOT_DIR, "results", "test_torchvision_interfacer"), exist_ok=True) + from wild_visual_navigation.utils.testing import load_test_image # Inference model device = "cuda" if torch.cuda.is_available() else "cpu" - p = join(WVN_ROOT_DIR, "assets/images/forest_clean.png") - np_img = cv2.imread(p) - img = torch.from_numpy(cv2.cvtColor(np_img, cv2.COLOR_BGR2RGB)).to(device) - img = img.permute(2, 0, 1) - img = (img.type(torch.float32) / 255)[None] + img = load_test_image().to(device) - plot = False - save_features = True + # plot = False + # save_features = True size = 448 model_type = "resnet18" - di = TorchVisionInterface(model_type=model_type, input_size=488) + di = TorchVisionInterface(model_type=model_type, input_size=size) di.to(device) img.to(device) res = di(img) diff --git a/wild_visual_navigation/image_projector/__init__.py b/wild_visual_navigation/image_projector/__init__.py index be10e47c..3c571a78 100644 --- a/wild_visual_navigation/image_projector/__init__.py +++ b/wild_visual_navigation/image_projector/__init__.py @@ -1 +1,6 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from .image_projector import ImageProjector, run_image_projector diff --git a/wild_visual_navigation/image_projector/image_projector.py b/wild_visual_navigation/image_projector/image_projector.py index 0fde0707..f1d8a3c2 100644 --- a/wild_visual_navigation/image_projector/image_projector.py +++ b/wild_visual_navigation/image_projector/image_projector.py @@ -1,6 +1,9 @@ -from wild_visual_navigation import WVN_ROOT_DIR -from wild_visual_navigation.utils import Timer -import os +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# +from pytictac import Timer from os.path import join import torch from torchvision import transforms as T @@ -147,7 +150,11 @@ def project(self, pose_camera_in_world: torch.tensor, points_W: torch.tensor): return projected_points, valid_points, valid_z def project_and_render( - self, pose_camera_in_world: torch.tensor, points: torch.tensor, colors: torch.tensor, image: torch.tensor = None + self, + pose_camera_in_world: torch.tensor, + points: torch.tensor, + colors: torch.tensor, + image: torch.tensor = None, ): """Projects the points and returns an image with the projection @@ -197,19 +204,17 @@ def run_image_projector(): """Projects 3D points to example images and returns an image with the projection""" from wild_visual_navigation.visu import get_img_from_fig - from wild_visual_navigation.utils import Timer - from wild_visual_navigation.utils import make_plane, make_box, make_dense_plane, make_polygon_from_points - from PIL import Image + from wild_visual_navigation.utils import ( + make_polygon_from_points, + ) + from wild_visual_navigation.utils.testing import load_test_image, make_results_folder import matplotlib.pyplot as plt import torch - import torchvision.transforms as transforms from kornia.utils import tensor_to_image - from stego.src import remove_axes - - to_tensor = transforms.ToTensor() + from stego.utils import remove_axes # Create test directory - os.makedirs(join(WVN_ROOT_DIR, "results", "test_image_projector"), exist_ok=True) + outpath = make_results_folder("test_image_projector") # Define number of cameras (batch) B = 10 @@ -229,17 +234,14 @@ def run_image_projector(): R_WC = SO3.from_rpy(phi) # Rotation matrix from roll-pitch-yaw pose_camera_in_world[i] = SE3(R_WC, rho).as_matrix() # Pose matrix of camera in world frame # Image size - H = 1080 - W = 1440 + H = torch.tensor(1080) + W = torch.tensor(1440) # Create projector im = ImageProjector(K, H, W) # Load image - pil_img = Image.open(join(WVN_ROOT_DIR, "assets/images/forest_clean.png")) - - # Convert to torch - k_img = to_tensor(pil_img) + k_img = load_test_image() k_img = k_img.expand(B, 3, H, W) k_img = im.resize_image(k_img) @@ -269,7 +271,7 @@ def run_image_projector(): for x in range(-3, 3, 1): try: k_points_overlay[:, idx[1].item() + y, idx[0].item() + x] = torch.tensor([0, 255, 0]) - except Exception as e: + except Exception: continue ax[i, 0].imshow(tensor_to_image(k_img[i])) @@ -285,7 +287,12 @@ def run_image_projector(): # Store results to test directory img = get_img_from_fig(fig) - img.save(join(WVN_ROOT_DIR, "results", "test_image_projector", "forest_clean_image_projector.png")) + img.save( + join( + outpath, + "forest_clean_image_projector.png", + ) + ) if __name__ == "__main__": diff --git a/wild_visual_navigation/learning/dataset/__init__.py b/wild_visual_navigation/learning/dataset/__init__.py deleted file mode 100644 index d44093de..00000000 --- a/wild_visual_navigation/learning/dataset/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -from .graph_trav_dataset import get_ablation_module -from .twist_dataset import TwistDataset, TwistDataModule diff --git a/wild_visual_navigation/learning/general/training_routine.py b/wild_visual_navigation/learning/general/training_routine.py deleted file mode 100644 index 28f1451d..00000000 --- a/wild_visual_navigation/learning/general/training_routine.py +++ /dev/null @@ -1,124 +0,0 @@ -import os -import yaml -import dataclasses -import pickle -import warnings - -warnings.filterwarnings("ignore", ".*does not have many workers.*") - -# Frameworks -import torch 
-import pytorch_lightning -from pytorch_lightning import seed_everything, Trainer -from pytorch_lightning.callbacks import EarlyStopping -from pytorch_lightning.callbacks import ModelCheckpoint -from pytorch_lightning.callbacks import LearningRateMonitor -from pytorch_lightning.profiler import AdvancedProfiler -from pytorch_lightning.utilities import rank_zero_warn, rank_zero_only -from pytorch_lightning.plugins import DDP2Plugin, DDPPlugin, DDPSpawnPlugin - -# Costume Modules -from wild_visual_navigation.learning.utils import get_logger -from wild_visual_navigation.learning.lightning import LightningTrav -from wild_visual_navigation.learning.utils import load_yaml, load_env, create_experiment_folder -from wild_visual_navigation.learning.dataset import get_ablation_module -from wild_visual_navigation.cfg import ExperimentParams - -__all__ = ["training_routine"] - - -def training_routine(experiment: ExperimentParams, seed=42) -> torch.Tensor: - experiment.verify_params() - seed_everything(seed) - exp = dataclasses.asdict(experiment) - env = load_env() - - if exp["general"]["log_to_disk"]: - model_path = create_experiment_folder(exp, env) - exp["general"]["name"] = os.path.relpath(model_path, env["base"]) - - with open(os.path.join(model_path, "experiment_params.yaml"), "w") as f: - yaml.dump(exp, f, default_flow_style=False) - - exp["trainer"]["default_root_dir"] = model_path - exp["visu"]["learning_visu"]["p_visu"] = os.path.join(exp["general"]["model_path"], "visu") - - logger = get_logger(exp, env) - - # Set gpus - if (exp["trainer"]).get("gpus", -1) == -1: - nr = torch.cuda.device_count() - print(f"Set GPU Count for Trainer to {nr}!") - for i in range(nr): - print(f"Device {i}: " + str(torch.cuda.get_device_name(i))) - exp["trainer"]["gpus"] = -1 - - # Profiler - if exp["trainer"].get("profiler", False) == "advanced": - exp["trainer"]["profiler"] = AdvancedProfiler(dirpath=model_path, filename="profile.txt") - else: - exp["trainer"]["profiler"] = False - - # Callbacks - cb_ls = [] - if logger is not None: - cb_ls.append(LearningRateMonitor(**exp["lr_monitor"])) - - if exp["cb_early_stopping"]["active"]: - early_stop_callback = EarlyStopping(**exp["cb_early_stopping"]["cfg"]) - cb_ls.appned(early_stop_callback) - - if exp["cb_checkpoint"]["active"]: - checkpoint_callback = ModelCheckpoint( - dirpath=model_path, save_top_k=1, monitor="epoch", mode="max", save_last=True - ) - cb_ls.append(checkpoint_callback) - - gpus = 1 if torch.cuda.is_available() else None - exp["trainer"]["gpus"] = gpus - - train_dl, val_dl, test_dl = get_ablation_module( - **exp["ablation_data_module"], - perugia_root=env["perugia_root"], - get_train_val_dataset=not exp["general"]["skip_train"], - get_test_dataset=not exp["ablation_data_module"]["val_equals_test"], - ) - - # Set correct input feature dimension - if train_dl is not None: - sample = train_dl.dataset[0] - else: - sample = test_dl[0].dataset[0] - - input_feature_dimension = sample.x.shape[1] - - exp["model"]["simple_mlp_cfg"]["input_size"] = input_feature_dimension - exp["model"]["simple_gcn_cfg"]["input_size"] = input_feature_dimension - - # Model - model = LightningTrav(exp=exp, env=env) - if type(exp["model"]["load_ckpt"]) == str: - ckpt = torch.load(exp["model"]["load_ckpt"]) - try: - res = model.load_state_dict(ckpt["state_dict"], strict=False) - except: - res = model.load_state_dict(ckpt, strict=False) - print("Loaded model checkpoint:", res) - trainer = Trainer(**exp["trainer"], callbacks=cb_ls, logger=logger) - - if not 
exp["general"]["skip_train"]: - trainer.fit(model=model, train_dataloaders=train_dl, val_dataloaders=val_dl) - - if exp["ablation_data_module"]["val_equals_test"]: - return model.accumulated_val_results, model - - test_envs = [] - for j, dl in enumerate(test_dl): - if exp["loss"]["w_trav"] == 0: - model._traversability_loss._anomaly_threshold = None - - model.nr_test_run = j - res = trainer.test(model=model, dataloaders=dl)[0] - test_envs.append(dl.dataset.env) - - return {k: v for k, v in zip(test_envs, model.accumulated_test_results)}, model diff --git a/wild_visual_navigation/learning/model/__init__.py b/wild_visual_navigation/learning/model/__init__.py deleted file mode 100644 index 5fc47920..00000000 --- a/wild_visual_navigation/learning/model/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -from .simple_mlp import SimpleMLP, DoubleMLP -from .simple_gcn import SimpleGCN -from .network_register import get_model diff --git a/wild_visual_navigation/learning/utils/__init__.py b/wild_visual_navigation/learning/utils/__init__.py deleted file mode 100644 index 5872fd1d..00000000 --- a/wild_visual_navigation/learning/utils/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -from .flatten_dict import * -from .get_logger import get_logger, get_neptune_run -from .loading import load_env, load_yaml, file_path -from .create_experiment_folder import create_experiment_folder -from .get_confidence import get_confidence -from .loss import TraversabilityLoss -from .metric_logger import MetricLogger diff --git a/wild_visual_navigation/learning/utils/get_logger.py b/wild_visual_navigation/learning/utils/get_logger.py deleted file mode 100644 index 00353184..00000000 --- a/wild_visual_navigation/learning/utils/get_logger.py +++ /dev/null @@ -1,110 +0,0 @@ -from pytorch_lightning.loggers.neptune import NeptuneLogger -from pytorch_lightning.loggers import TensorBoardLogger, WandbLogger -from wild_visual_navigation.learning.utils import flatten_dict -import inspect -import os -import neptune.new as neptune - -__all__ = ["get_neptune_logger", "get_wandb_logger", "get_tensorboard_logger", "get_neptune_run"] - -PROXIES = {"http": "http://proxy.ethz.ch:3128", "https": "http://proxy.ethz.ch:3128"} - - -def get_neptune_run(neptune_project_name: str, tags: [str]) -> any: - """Get neptune run - - Args: - neptune_project_name (str): Neptune project name - tags (list of str): Tags to identify the project - """ - proxies = None - if os.environ["ENV_WORKSTATION_NAME"] == "euler": - proxies = PROXIES - - run = neptune.init( - api_token=os.environ["NEPTUNE_API_TOKEN"], - project=neptune_project_name, - tags=[os.environ["ENV_WORKSTATION_NAME"]] + tags, - proxies=proxies, - ) - return run - - -def get_neptune_logger(exp: dict, env: dict) -> NeptuneLogger: - """Returns NeptuneLogger - - Args: - exp (dict): Content of environment file - env (dict): Content of experiment file - Returns: - (logger): Logger - """ - project_name = exp["logger"]["neptune_project_name"] # Neptune AI project_name "username/project" - - params = flatten_dict(exp) - - name_full = exp["general"]["name"] - name_short = "__".join(name_full.split("/")[-2:]) - - proxies = None - if os.environ["ENV_WORKSTATION_NAME"] == "euler": - proxies = PROXIES - - return NeptuneLogger( - api_key=os.environ["NEPTUNE_API_TOKEN"], - project=project_name, - name=name_short, - tags=[os.environ["ENV_WORKSTATION_NAME"], name_full.split("/")[-2], name_full.split("/")[-1]], - proxies=proxies, - ) - - -def get_wandb_logger(exp: dict, env: dict) -> WandbLogger: - """Returns NeptuneLogger - - 
Args: - exp (dict): Content of environment file - - Returns: - (logger): Logger - """ - project_name = exp["logger"]["wandb_project_name"] # project_name (str): W&B project_name - save_dir = os.path.join(exp["general"]["model_path"]) # save_dir (str): File path to save directory - params = flatten_dict(exp) - name_full = exp["general"]["name"] - name_short = "__".join(name_full.split("/")[-2:]) - return WandbLogger( - name=name_short, project=project_name, entity=exp["logger"]["wandb_entity"], save_dir=save_dir, offline=False - ) - - -def get_tensorboard_logger(exp: dict, env: dict) -> TensorBoardLogger: - """Returns TensorboardLoggers - - Args: - exp (dict): Content of environment file - - Returns: - (logger): Logger - """ - params = flatten_dict(exp) - return TensorBoardLogger(save_dir=exp["name"], name="tensorboard", default_hp_metric=params) - - -def get_skip_logger(exp: dict, env: dict) -> None: - """Returns None - - Args: - exp (dict): Content of environment file - - Returns: - (logger): Logger - """ - return None - - -def get_logger(exp: dict, env: dict) -> any: - name = exp["logger"]["name"] - save_dir = os.path.join(env["base"], exp["general"]["name"]) - register = {k: v for k, v in globals().items() if inspect.isfunction(v)} - return register[f"get_{name}_logger"](exp, env) diff --git a/wild_visual_navigation/model/__init__.py b/wild_visual_navigation/model/__init__.py new file mode 100644 index 00000000..d21a5c70 --- /dev/null +++ b/wild_visual_navigation/model/__init__.py @@ -0,0 +1,9 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from .simple_mlp import SimpleMLP, DoubleMLP +from .simple_gcn import SimpleGCN +from .linear_rnvp import LinearRnvp +from .network_register import get_model diff --git a/wild_visual_navigation/model/linear_rnvp.py b/wild_visual_navigation/model/linear_rnvp.py new file mode 100644 index 00000000..fa63b286 --- /dev/null +++ b/wild_visual_navigation/model/linear_rnvp.py @@ -0,0 +1,296 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from wild_visual_navigation.utils import Data +import copy +import torch +from torch import nn, distributions + + +class LinearBatchNorm(nn.Module): + """ + An (invertible) batch normalization layer. 
+ This class is mostly inspired from this one: + https://github.com/kamenbliznashki/normalizing_flows/blob/master/maf.py + """ + + def __init__(self, input_size, momentum=0.9, eps=1e-5): + super().__init__() + self.momentum = momentum + self.eps = eps + + self.log_gamma = nn.Parameter(torch.zeros(input_size)) + self.beta = nn.Parameter(torch.zeros(input_size)) + + self.register_buffer("running_mean", torch.zeros(input_size)) + self.register_buffer("running_var", torch.ones(input_size)) + + def forward(self, x, **kwargs): + if self.training: + self.batch_mean = x.mean(0) + self.batch_var = x.var(0) + + self.running_mean.mul_(self.momentum).add_(self.batch_mean.data * (1 - self.momentum)) + self.running_var.mul_(self.momentum).add_(self.batch_var.data * (1 - self.momentum)) + + mean = self.batch_mean + var = self.batch_var + else: + mean = self.running_mean + var = self.running_var + + x_hat = (x - mean) / torch.sqrt(var + self.eps) + y = self.log_gamma.exp() * x_hat + self.beta + + log_det = self.log_gamma - 0.5 * torch.log(var + self.eps) + + return y, log_det.expand_as(x).sum(1) + + def backward(self, x, **kwargs): + if self.training: + mean = self.batch_mean + var = self.batch_var + else: + mean = self.running_mean + var = self.running_var + + x_hat = (x - self.beta) * torch.exp(-self.log_gamma) + x = x_hat * torch.sqrt(var + self.eps) + mean + + log_det = 0.5 * torch.log(var + self.eps) - self.log_gamma + + return x, log_det.expand_as(x).sum(1) + + +class LinearCouplingLayer(nn.Module): + """ + Linear coupling layer. + (i) Split the input x into 2 parts x1 and x2 according to a given mask. + (ii) Compute s(x2) and t(x2) with given neural network. + (iii) Final output is [exp(s(x2))*x1 + t(x2); x2]. + The inverse is trivially [(x1 - t(x2))*exp(-s(x2)); x2]. + """ + + def __init__( + self, + input_size, + mask, + network_topology, + conditioning_size=None, + single_function=True, + ): + super().__init__() + + if conditioning_size is None: + conditioning_size = 0 + + if network_topology is None or len(network_topology) == 0: + network_topology = [input_size] + + self.register_buffer("mask", mask) + + self.dim = input_size + + self.s = [ + nn.Linear(input_size + conditioning_size, network_topology[0]), + nn.ReLU(), + ] + + for i in range(len(network_topology)): + t = network_topology[i] + t_p = network_topology[i - 1] + self.s.extend([nn.Linear(t_p, t), nn.ReLU()]) + + if single_function: + input_size = input_size * 2 + + ll = nn.Linear(network_topology[-1], input_size) + + self.s.append(ll) + self.s = nn.Sequential(*self.s) + + if single_function: + self.st = lambda x: (self.s(x).chunk(2, 1)) + else: + self.t = copy.deepcopy(self.s) + self.st = lambda x: (self.s(x), self.t(x)) + + def backward(self, x, y=None): + mx = x * self.mask + + if y is not None: + _mx = torch.cat([y, mx], dim=1) + else: + _mx = mx + + s, t = self.st(_mx) + s = torch.tanh(s) # Adding an activation function here with non-linearities + + u = mx + (1 - self.mask) * (x - t) * torch.exp(-s) + + log_abs_det_jacobian = -(1 - self.mask) * s + + return u, log_abs_det_jacobian.sum(1) + + def forward(self, u, y=None): + mu = u * self.mask + + if y is not None: + _mu = torch.cat([y, mu], dim=1) + else: + _mu = mu + + s, t = self.st(_mu) + s = torch.tanh(s) + + x = mu + (1 - self.mask) * (u * s.exp() + t) + + log_abs_det_jacobian = (1 - self.mask) * s + + return x, log_abs_det_jacobian.sum(1) + + +class Permutation(nn.Module): + """ + A permutation layer. 
+ """ + + def __init__(self, in_ch): + super().__init__() + self.in_ch = in_ch + self.register_buffer("p", torch.randperm(in_ch)) + self.register_buffer("invp", torch.argsort(self.p)) + + def forward(self, x, y=None): + assert x.shape[1] == self.in_ch + out = x[:, self.p] + return out, 0 + + def backward(self, x, y=None): + assert x.shape[1] == self.in_ch + out = x[:, self.invp] + return out, 0 + + +class SequentialFlow(nn.Sequential): + """ + Utility class to build a normalizing flow from a sequence of base transformations. + During forward and inverse steps, aggregates the sum of the log determinants of the Jacobians. + """ + + def forward(self, x, y=None): + log_det = 0 + for module in self: + x, _log_det = module(x, y=y) + log_det = log_det + _log_det + return x, log_det + + def backward(self, u, y=None): + log_det = 0 + for module in reversed(self): + u, _log_det = module.backward(u, y=y) + log_det = log_det + _log_det + return u, log_det + + def forward_steps(self, x, y=None): + log_det = 0 + xs = [x] + for module in self: + x, _log_det = module(x, y=y) + xs.append(x) + log_det = log_det + _log_det + return xs, log_det + + def backward_steps(self, u, y=None): + log_det = 0 + us = [u] + for module in reversed(self): + u, _log_det = module.backward(u, y=y) + us.append(u) + log_det = log_det + _log_det + return us, log_det + + +class LinearRnvp(nn.Module): + """ + Main RNVP model, alternating affine coupling layers + with permutations and/or batch normalization steps. + """ + + def __init__( + self, + input_size, + coupling_topology, + flow_n=2, + use_permutation=False, + batch_norm=False, + mask_type="odds", + conditioning_size=None, + single_function=False, + **kwargs + ): + super().__init__() + + self.register_buffer("prior_mean", torch.zeros(input_size)) # Normal Gaussian with zero mean + self.register_buffer("prior_var", torch.ones(input_size)) # Normal Gaussian with unit variance + + if mask_type == "odds": + mask = torch.arange(0, input_size).float() % 2 + elif mask_type == "half": + mask = torch.zeros(input_size) + mask[: input_size // 2] = 1 + else: + assert False + + if coupling_topology is None: + coupling_topology = [input_size // 2, input_size // 2] + + blocks = [] + + for i in range(flow_n): + blocks.append( + LinearCouplingLayer( + input_size, + mask, + network_topology=coupling_topology, + conditioning_size=conditioning_size, + single_function=single_function, + ) + ) + if use_permutation: + blocks.append(Permutation(input_size)) + else: + mask = 1 - mask + + if batch_norm: + blocks.append(LinearBatchNorm(input_size)) + + self.flows = SequentialFlow(*blocks) + + def logprob(self, x): + return self.prior.log_prob(x) # Compute log probability of the input at the Gaussian distribution + + @property + def prior(self): + return distributions.Normal(self.prior_mean, self.prior_var) # Normal Gaussian with zero mean and unit variance + + def forward(self, data: Data): + x = data.x + z, log_det = self.flows.forward(x, y=None) + log_prob = self.logprob(z) + return {"z": z, "log_det": log_det, "logprob": log_prob} + + def backward(self, u, y=None, return_step=False): + if return_step: + return self.flows.backward_steps(u, y) + return self.flows.backward(u, y) + + def sample(self, samples=1, y=None, return_step=False, return_logdet=False): + u = self.prior.sample((samples,)) + z, d = self.backward(u, y=y, return_step=return_step) + if return_logdet: + d = self.logprob(u).sum(1) + d + return z, d + return z diff --git a/wild_visual_navigation/learning/model/network_register.py 
b/wild_visual_navigation/model/network_register.py similarity index 85% rename from wild_visual_navigation/learning/model/network_register.py rename to wild_visual_navigation/model/network_register.py index 7883cda5..fb991df8 100644 --- a/wild_visual_navigation/learning/model/network_register.py +++ b/wild_visual_navigation/model/network_register.py @@ -1,4 +1,9 @@ -from wild_visual_navigation.learning.model import * +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from wild_visual_navigation.model import * import inspect import torch @@ -52,7 +57,7 @@ def get_model(model_cfg: dict) -> torch.nn.Module: if __name__ == "__main__": from wild_visual_navigation import WVN_ROOT_DIR - from wild_visual_navigation.learning.utils import load_yaml + from wild_visual_navigation.utils import load_yaml from os.path import join exp = load_yaml(join(WVN_ROOT_DIR, "cfg/exp/exp.yaml")) diff --git a/wild_visual_navigation/learning/model/simple_gcn.py b/wild_visual_navigation/model/simple_gcn.py similarity index 79% rename from wild_visual_navigation/learning/model/simple_gcn.py rename to wild_visual_navigation/model/simple_gcn.py index 4beb7955..419e4f24 100644 --- a/wild_visual_navigation/learning/model/simple_gcn.py +++ b/wild_visual_navigation/model/simple_gcn.py @@ -1,7 +1,13 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# import torch import torch.nn.functional as F -from torch_geometric.nn import GCNConv -from torch_geometric.data import Data + +# from torch_geometric.nn import GCNConv +from wild_visual_navigation.utils import Data class SimpleGCN(torch.nn.Module): diff --git a/wild_visual_navigation/learning/model/simple_mlp.py b/wild_visual_navigation/model/simple_mlp.py similarity index 84% rename from wild_visual_navigation/learning/model/simple_mlp.py rename to wild_visual_navigation/model/simple_mlp.py index dea1e40c..bbf0a28a 100644 --- a/wild_visual_navigation/learning/model/simple_mlp.py +++ b/wild_visual_navigation/model/simple_mlp.py @@ -1,10 +1,19 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from wild_visual_navigation.utils import Data import torch -from torch_geometric.data import Data -import torch.nn.functional as F class SimpleMLP(torch.nn.Module): - def __init__(self, input_size: int = 64, hidden_sizes: [int] = [255], reconstruction: bool = False): + def __init__( + self, + input_size: int = 64, + hidden_sizes: [int] = [255], + reconstruction: bool = False, + ): super(SimpleMLP, self).__init__() layers = [] self.nr_sigmoid_layers = hidden_sizes[-1] diff --git a/wild_visual_navigation/supervision_generator/__init__.py b/wild_visual_navigation/supervision_generator/__init__.py index 7d990007..cb5e5f91 100644 --- a/wild_visual_navigation/supervision_generator/__init__.py +++ b/wild_visual_navigation/supervision_generator/__init__.py @@ -1 +1,7 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# +from .twist_dataset import TwistDataset, TwistDataModule from .supervision_generator import SupervisionGenerator, run_supervision_generator diff --git a/wild_visual_navigation/supervision_generator/supervision_generator.py b/wild_visual_navigation/supervision_generator/supervision_generator.py index b44190f6..be32f62a 100644 --- a/wild_visual_navigation/supervision_generator/supervision_generator.py +++ b/wild_visual_navigation/supervision_generator/supervision_generator.py @@ -1,27 +1,30 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from wild_visual_navigation import WVN_ROOT_DIR from wild_visual_navigation.traversability_estimator.graphs import DistanceWindowGraph from wild_visual_navigation.traversability_estimator.nodes import TwistNode from wild_visual_navigation.utils import KalmanFilter -from liegroups import SE2, SE3 -from os.path import join +from liegroups import SE3 import os import torch -import torch.nn as nn class SupervisionGenerator: def __init__( self, - device: str = "cuda", - kf_process_cov: float = 0.01, - kf_meas_cov: float = 10, - kf_outlier_rejection: str = "none", - kf_outlier_rejection_delta: float = 1.0, - sigmoid_slope: float = 15, - sigmoid_cutoff: float = 0.2, - untraversable_thr: float = 0.1, - time_horizon: float = 1, - graph_max_length: float = 1, + device: str, + kf_process_cov, + kf_meas_cov, + kf_outlier_rejection, + kf_outlier_rejection_delta, + sigmoid_slope, + sigmoid_cutoff, + untraversable_thr, + time_horizon, + graph_max_length, ): """Generates traversability signals/labels from different sources @@ -102,9 +105,6 @@ def update_velocity_tracking( S = self.get_velocity_selection_matrix(velocities).to(self.device) - # Get dimensionality of input - N = current_velocity.shape[-1] - # Compute discrepancy error = (torch.nn.functional.mse_loss(S @ current_velocity, S @ desired_velocity)) / max_velocity @@ -185,7 +185,7 @@ def untraversable_thr(self): def run_supervision_generator(): """Projects 3D points to example images and returns an image with the projection""" - from wild_visual_navigation.learning.dataset import TwistDataset + from wild_visual_navigation.supervision_generator import TwistDataset import matplotlib.pyplot as plt import numpy as np import pandas as pd @@ -206,6 +206,8 @@ def run_supervision_generator(): sigmoid_slope=30, sigmoid_cutoff=0.2, untraversable_thr=0.05, + time_horizon=0.05, + graph_max_length=1, ) # Saved data list @@ -220,9 +222,21 @@ def run_supervision_generator(): trav, trav_cov, is_untrav = ag.update_velocity_tracking( curr[0], des[0], max_velocity=0.8, velocities=["vx", "vy"] ) - saved_data.append([t.item(), curr.norm().item(), des.norm().item(), trav.item(), trav_cov.item(), is_untrav]) + saved_data.append( + [ + t.item(), + curr.norm().item(), + des.norm().item(), + trav.item(), + trav_cov.item(), + is_untrav, + ] + ) - df = pd.DataFrame(saved_data, columns=["ts", "curr", "des", "trav", "trav_cov", "is_untraversable"]) + df = pd.DataFrame( + saved_data, + columns=["ts", "curr", "des", "trav", "trav_cov", "is_untraversable"], + ) df["ts"] = df["ts"] - df["ts"][0] df["trav_upper"] = df["trav"] + df["trav_cov"] diff --git a/wild_visual_navigation/learning/dataset/twist_dataset.py b/wild_visual_navigation/supervision_generator/twist_dataset.py similarity index 88% rename from wild_visual_navigation/learning/dataset/twist_dataset.py rename to 
wild_visual_navigation/supervision_generator/twist_dataset.py index 602ddd4f..2e0c906c 100644 --- a/wild_visual_navigation/learning/dataset/twist_dataset.py +++ b/wild_visual_navigation/supervision_generator/twist_dataset.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from wild_visual_navigation import WVN_ROOT_DIR import os import torch @@ -5,8 +10,8 @@ import pandas as pd import pytorch_lightning as pl from pathlib import Path -from torch.utils.data import Dataset, DataLoader, random_split -from typing import Optional, Callable +from torch.utils.data import Dataset, DataLoader +from typing import Optional class TwistDataset(Dataset): @@ -78,7 +83,12 @@ def __init__( # Find closest samples tol = pd.Timedelta(ts_matching_thr) merged_df = pd.merge_asof( - left=current_df, right=desired_df, right_index=True, left_index=True, direction="nearest", tolerance=tol + left=current_df, + right=desired_df, + right_index=True, + left_index=True, + direction="nearest", + tolerance=tol, ) # Reindex to integer indices self.size = merged_df.index.size @@ -129,7 +139,12 @@ def __getitem__(self, idx: int): class TwistDataModule(pl.LightningDataModule): def __init__( - self, root: str, current_filename: str, desired_filename: str, seq_size: int = 16, batch_size: int = 32 + self, + root: str, + current_filename: str, + desired_filename: str, + seq_size: int = 16, + batch_size: int = 32, ): super().__init__() self.root = root @@ -138,10 +153,18 @@ def __init__( self.seq_size = seq_size self.batch_size = batch_size self.data_train = TwistDataset( - self.root, self.current_filename, self.desired_filename, mode="train", seq_size=self.seq_size + self.root, + self.current_filename, + self.desired_filename, + mode="train", + seq_size=self.seq_size, ) self.data_val = TwistDataset( - self.root, self.current_filename, self.desired_filename, mode="val", seq_size=self.seq_size + self.root, + self.current_filename, + self.desired_filename, + mode="val", + seq_size=self.seq_size, ) def prepare_data(self): diff --git a/wild_visual_navigation/traversability_estimator/__init__.py b/wild_visual_navigation/traversability_estimator/__init__.py index 43c2cd3b..f3c6973a 100644 --- a/wild_visual_navigation/traversability_estimator/__init__.py +++ b/wild_visual_navigation/traversability_estimator/__init__.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from .graphs import ( BaseGraph, TemporalWindowGraph, @@ -6,5 +11,5 @@ run_base_graph, run_temporal_window_graph, ) -from .nodes import BaseNode, ProprioceptionNode, MissionNode, TwistNode, run_base_state +from .nodes import BaseNode, SupervisionNode, MissionNode, TwistNode, run_base_state from .traversability_estimator import TraversabilityEstimator diff --git a/wild_visual_navigation/traversability_estimator/graphs.py b/wild_visual_navigation/traversability_estimator/graphs.py index 4064f63d..d4709001 100644 --- a/wild_visual_navigation/traversability_estimator/graphs.py +++ b/wild_visual_navigation/traversability_estimator/graphs.py @@ -1,10 +1,14 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# from .nodes import BaseNode import networkx as nx import random from threading import Lock import networkx import torch -from wild_visual_navigation.utils import Timer class BaseGraph: @@ -69,7 +73,11 @@ def add_node(self, node: BaseNode): # Add edge to latest if self._last_added_node is not None: - self._graph.add_edge(node, self._last_added_node, distance=node.distance_to(self._last_added_node)) + self._graph.add_edge( + node, + self._last_added_node, + distance=node.distance_to(self._last_added_node), + ) else: self._first_node = node @@ -144,7 +152,12 @@ def approximate_timestamp_filter(node): return nodes[0] if len(nodes) > 0 else None def get_nodes_within_radius_range( - self, node: BaseNode, min_radius: float, max_radius: float, time_eps: float = 1, metric: str = "dijkstra" + self, + node: BaseNode, + min_radius: float, + max_radius: float, + time_eps: float = 1, + metric: str = "dijkstra", ): # Find closest node in the graph (timestamp). This is useful when we are finding nodes corresponding to another graph closest_node = self.get_node_with_timestamp(node.timestamp, eps=time_eps) @@ -192,7 +205,11 @@ def remove_nodes(self, nodes: list): self._graph.remove_nodes_from(nodes) def remove_nodes_within_radius_range( - self, node: BaseNode, min_radius: float = 0, max_radius: float = float("inf"), metric: str = "dijkstra" + self, + node: BaseNode, + min_radius: float = 0, + max_radius: float = float("inf"), + metric: str = "dijkstra", ): # Significantly faster then checking all the nodes nodes_to_remove = [] @@ -310,7 +327,10 @@ def run_base_graph(): nodes_list = [] for i in range(N): - s = BaseNode(timestamp=i, pose_base_in_world=SE3(SO3.identity(), torch.Tensor([i / 10.0, 0, 0])).as_matrix()) + s = BaseNode( + timestamp=i, + pose_base_in_world=SE3(SO3.identity(), torch.Tensor([i / 10.0, 0, 0])).as_matrix(), + ) nodes_list.append(s) graph.add_node(s) @@ -363,7 +383,10 @@ def run_temporal_window_graph(): nodes_list = [] for i in range(N): t = i - s = BaseNode(timestamp=t, pose_base_in_world=SE3(SO3.identity(), torch.Tensor([i / 10.0, 0, 0])).as_matrix()) + s = BaseNode( + timestamp=t, + pose_base_in_world=SE3(SO3.identity(), torch.Tensor([i / 10.0, 0, 0])).as_matrix(), + ) nodes_list.append(s) graph.add_node(s) assert graph.get_first_node().timestamp >= t - W diff --git a/wild_visual_navigation/traversability_estimator/nodes.py b/wild_visual_navigation/traversability_estimator/nodes.py index 2addb156..42a9ac0b 100644 --- a/wild_visual_navigation/traversability_estimator/nodes.py +++ b/wild_visual_navigation/traversability_estimator/nodes.py @@ -1,10 +1,20 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# from wild_visual_navigation.image_projector import ImageProjector -from wild_visual_navigation.utils import make_box, make_plane, make_polygon_from_points, make_dense_plane +from wild_visual_navigation.utils import ( + make_box, + make_plane, + make_polygon_from_points, + make_dense_plane, +) from liegroups.torch import SE3, SO3 -from torch_geometric.data import Data +from wild_visual_navigation.utils import Data + import os import torch -import torch.nn.functional as F from typing import Optional @@ -74,7 +84,10 @@ def distance_to(self, other): """ # Compute pose difference, then log() to get a vector, then extract position coordinates, finally get norm return ( - SE3.from_matrix(self.pose_base_in_world.inverse() @ other.pose_base_in_world, normalize=True) + SE3.from_matrix( + self.pose_base_in_world.inverse() @ other.pose_base_in_world, + normalize=True, + ) .log()[:3] .norm() ) @@ -114,7 +127,6 @@ def __init__( pose_cam_in_world: torch.tensor = None, image: torch.tensor = None, image_projector: ImageProjector = None, - correspondence=None, camera_name="cam", use_for_training=True, ): @@ -131,7 +143,6 @@ def __init__( # Uninitialized members self._features = None - self._feature_type = None self._feature_edges = None self._feature_segments = None self._feature_positions = None @@ -139,7 +150,6 @@ def __init__( self._supervision_mask = None self._supervision_signal = None self._supervision_signal_valid = None - self._correspondence = correspondence self._confidence = None def clear_debug_data(self): @@ -161,10 +171,12 @@ def change_device(self, device): """ super().change_device(device) self._image_projector.change_device(device) - self._image = self._image.to(device) + self._pose_cam_in_base = self._pose_cam_in_base.to(device) self._pose_cam_in_world = self._pose_cam_in_world.to(device) + if self._image is not None: + self._image = self._image.to(device) if self._features is not None: self._features = self._features.to(device) if self._feature_edges is not None: @@ -181,38 +193,57 @@ def change_device(self, device): self._supervision_signal = self._supervision_signal.to(device) if self._supervision_signal_valid is not None: self._supervision_signal_valid = self._supervision_signal_valid.to(device) - if self._correspondence is not None: - self._correspondence = self._correspondence.to(device) if self._confidence is not None: self._confidence = self._confidence.to(device) - def as_pyg_data(self, previous_node: Optional[BaseNode] = None, aux: bool = False): + def as_pyg_data( + self, + previous_node: Optional[BaseNode] = None, + anomaly_detection: bool = False, + aux: bool = False, + ): if aux: return Data(x=self.features, edge_index=self._feature_edges) if previous_node is None: - return Data( - x=self.features, - edge_index=self._feature_edges, - y=self._supervision_signal, - y_valid=self._supervision_signal_valid, - ) + if anomaly_detection: + return Data( + x=self.features[self._supervision_signal_valid], + edge_index=self._feature_edges, + y=self._supervision_signal[self._supervision_signal_valid], + y_valid=self._supervision_signal_valid[self._supervision_signal_valid], + ) + else: + return Data( + x=self.features, + edge_index=self._feature_edges, + y=self._supervision_signal, + y_valid=self._supervision_signal_valid, + ) + else: - return Data( - x=self.features, - edge_index=self._feature_edges, - y=self._supervision_signal, - y_valid=self._supervision_signal_valid, - x_previous=previous_node.features, - edge_index_previous=previous_node._feature_edges, - 
correspondence=self._correspondence, - ) + if anomaly_detection: + return Data( + x=self.features[self._supervision_signal_valid], + edge_index=self._feature_edges, + y=self._supervision_signal[self._supervision_signal_valid], + y_valid=self._supervision_signal_valid[self._supervision_signal_valid], + x_previous=previous_node.features, + edge_index_previous=previous_node._feature_edges, + ) + else: + return Data( + x=self.features, + edge_index=self._feature_edges, + y=self._supervision_signal, + y_valid=self._supervision_signal_valid, + x_previous=previous_node.features, + edge_index_previous=previous_node._feature_edges, + ) def is_valid(self): - valid_members = ( isinstance(self._features, torch.Tensor) and isinstance(self._supervision_signal, torch.Tensor) - and isinstance(self._correspondence, torch.Tensor) and isinstance(self._supervision_signal_valid, torch.Tensor) ) valid_signals = self._supervision_signal_valid.any() if valid_members else False @@ -227,18 +258,10 @@ def camera_name(self): def confidence(self): return self._confidence - @property - def correspondence(self): - return self._correspondence - @property def features(self): return self._features - @property - def feature_type(self): - return self._feature_type - @property def feature_edges(self): return self._feature_edges @@ -291,18 +314,10 @@ def camera_name(self, camera_name): def confidence(self, confidence): self._confidence = confidence - @correspondence.setter - def correspondence(self, correspondence): - self._correspondence = correspondence - @features.setter def features(self, features): self._features = features - @feature_type.setter - def feature_type(self, feature_type): - self._feature_type = feature_type - @feature_edges.setter def feature_edges(self, feature_edges): self._feature_edges = feature_edges @@ -347,7 +362,13 @@ def supervision_mask(self, supervision_mask): def use_for_training(self, use_for_training): self._use_for_training = use_for_training - def save(self, output_path: str, index: int, graph_only: bool = False, previous_node: Optional[BaseNode] = None): + def save( + self, + output_path: str, + index: int, + graph_only: bool = False, + previous_node: Optional[BaseNode] = None, + ): if self._feature_positions is not None: graph_data = self.as_pyg_data(previous_node) path = os.path.join(output_path, "graph", f"graph_{index:06d}.pt") @@ -362,10 +383,17 @@ def save(self, output_path: str, index: int, graph_only: bool = False, previous_ p = path.replace("graph", "seg") torch.save(self._feature_segments.cpu(), p) - def project_footprint(self, footprint: torch.tensor, color: torch.tensor = torch.FloatTensor([1.0, 1.0, 1.0])): - mask, image_overlay, projected_points, valid_points = self._image_projector.project_and_render( - self._pose_cam_in_world[None], footprint, color - ) + def project_footprint( + self, + footprint: torch.tensor, + color: torch.tensor = torch.FloatTensor([1.0, 1.0, 1.0]), + ): + ( + mask, + image_overlay, + projected_points, + valid_points, + ) = self._image_projector.project_and_render(self._pose_cam_in_world[None], footprint, color) return mask, image_overlay, projected_points, valid_points @@ -412,11 +440,12 @@ def update_supervision_signal(self): self._supervision_signal_valid = self._supervision_signal > 0 -class ProprioceptionNode(BaseNode): +class SupervisionNode(BaseNode): """Local node stores all the information required for traversability estimation and debugging - All the information matches a real frame that must be respected to keep consistency""" + All the 
information matches a real frame that must be respected to keep consistency + """ - _name = "proprioception_node" + _name = "supervision_node" def __init__( self, @@ -429,7 +458,7 @@ def __init__( length: float = 0.1, width: float = 0.1, height: float = 0.1, - proprioception: torch.tensor = None, + supervision: torch.tensor = None, traversability: torch.tensor = torch.FloatTensor([0.0]), traversability_var: torch.tensor = torch.FloatTensor([1.0]), is_untraversable: bool = False, @@ -449,7 +478,7 @@ def __init__( self._length = length self._width = width self._height = height - self._proprioceptive_state = proprioception + self._supervision_state = supervision self._traversability = traversability self._traversability_var = traversability_var self._is_untraversable = is_untraversable @@ -465,17 +494,24 @@ def change_device(self, device): self._pose_footprint_in_world = self._pose_footprint_in_world.to(device) self._twist_in_base = self._twist_in_base.to(device) self._desired_twist_in_base = self._desired_twist_in_base.to(device) - self._proprioceptive_state = self._proprioceptive_state.to(device) + self._supervision_state = self._supervision_state.to(device) def get_bounding_box_points(self): - return make_box(self._length, self._width, self._height, pose=self._pose_base_in_world, grid_size=5).to( - self._pose_base_in_world.device - ) + return make_box( + self._length, + self._width, + self._height, + pose=self._pose_base_in_world, + grid_size=5, + ).to(self._pose_base_in_world.device) def get_footprint_points(self): - return make_plane(x=self._length, y=self._width, pose=self._pose_footprint_in_world, grid_size=25).to( - self._pose_footprint_in_world.device - ) + return make_plane( + x=self._length, + y=self._width, + pose=self._pose_footprint_in_world, + grid_size=25, + ).to(self._pose_footprint_in_world.device) def get_side_points(self): return make_plane(x=0.0, y=self._width, pose=self._pose_footprint_in_world, grid_size=2).to( @@ -495,7 +531,11 @@ def get_untraversable_plane(self, grid_size=5): # Prepare transformation of plane in base frame rho = torch.FloatTensor( - [0.5 * self._length * motion_direction[0], 0.5 * self._length * motion_direction[1], -self._height / 2] + [ + 0.5 * self._length * motion_direction[0], + 0.5 * self._length * motion_direction[1], + -self._height / 2, + ] ) # Translation vector (x, y, z) phi = torch.FloatTensor([0.0, 0.0, z_angle]) # roll-pitch-yaw R_BP = SO3.from_rpy(phi) @@ -503,9 +543,12 @@ def get_untraversable_plane(self, grid_size=5): pose_plane_in_world = self._pose_base_in_world @ pose_plane_in_base # Pose of plane in world frame # Make plane - return make_dense_plane(y=0.5 * self._width, z=self._height, pose=pose_plane_in_world, grid_size=grid_size).to( - device - ) + return make_dense_plane( + y=0.5 * self._width, + z=self._height, + pose=pose_plane_in_world, + grid_size=grid_size, + ).to(device) def make_footprint_with_node(self, other: BaseNode, grid_size: int = 10): if self.is_untraversable: @@ -559,8 +602,8 @@ def pose_footprint_in_world(self): return self._pose_footprint_in_world @property - def propropioceptive_state(self): - return self._proprioceptive_state + def supervision_state(self): + return self._supervision_state @traversability.setter def traversability(self, traversability): @@ -571,7 +614,7 @@ def traversability_var(self, variance): self._traversability_var = variance def is_valid(self): - return isinstance(self._proprioceptive_state, torch.Tensor) + return isinstance(self._supervision_state, torch.Tensor) class 
TwistNode(BaseNode): diff --git a/wild_visual_navigation/traversability_estimator/traversability_estimator.py b/wild_visual_navigation/traversability_estimator/traversability_estimator.py index e8bbbd0b..7d2aebf9 100644 --- a/wild_visual_navigation/traversability_estimator/traversability_estimator.py +++ b/wild_visual_navigation/traversability_estimator/traversability_estimator.py @@ -1,34 +1,31 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from wild_visual_navigation.feature_extractor import FeatureExtractor from wild_visual_navigation.image_projector import ImageProjector -from wild_visual_navigation.learning.model import get_model +from wild_visual_navigation.model import get_model from wild_visual_navigation.cfg import ExperimentParams -from wild_visual_navigation.utils import Timer, accumulate_time +from pytictac import accumulate_time from wild_visual_navigation.traversability_estimator import ( BaseGraph, DistanceWindowGraph, MissionNode, - ProprioceptionNode, + SupervisionNode, MaxElementsGraph, ) from wild_visual_navigation.utils import WVNMode -from wild_visual_navigation.learning.utils import TraversabilityLoss -from wild_visual_navigation.utils import make_polygon_from_points +from wild_visual_navigation.utils import TraversabilityLoss, AnomalyLoss from wild_visual_navigation.visu import LearningVisualizer -from wild_visual_navigation.utils import KLTTracker, KLTTrackerOpenCV - -from wild_visual_navigation import WVN_ROOT_DIR from pytorch_lightning import seed_everything -from torch_geometric.data import Data, Batch +from wild_visual_navigation.utils import Data, Batch from threading import Lock -import dataclasses import os import pickle -import time import torch -import torch.nn.functional as F import torchvision.transforms as transforms -from torchmetrics import ROC to_tensor = transforms.ToTensor() @@ -37,37 +34,26 @@ class TraversabilityEstimator: def __init__( self, params: ExperimentParams, - scale_traversability: bool, - device: str = "cuda", - max_distance: float = 3, - image_size: int = 448, - image_distance_thr: float = None, - proprio_distance_thr: float = None, - segmentation_type: str = "slic", - feature_type: str = "dino", - optical_flow_estimator_type: str = "none", - min_samples_for_training: int = 10, - vis_node_index: int = 10, - mode: bool = False, - extraction_store_folder=None, - **kwargs, + device: str, + max_distance: float, + image_distance_thr: float, + supervision_distance_thr: float, + min_samples_for_training: int, + vis_node_index: int, + mode: bool, + extraction_store_folder, + anomaly_detection: bool, ): self._device = device self._mode = mode self._extraction_store_folder = extraction_store_folder self._min_samples_for_training = min_samples_for_training self._vis_node_index = vis_node_index - self._scale_traversability = scale_traversability self._params = params - self._scale_traversability_threshold = 0 - - if self._scale_traversability: - # Use 500 bins for constant memory usuage - self._auxiliary_training_roc = ROC(task="binary", thresholds=5000) - self._auxiliary_training_roc.to(self._device) + self._anomaly_detection = anomaly_detection # Local graphs - self._proprio_graph = DistanceWindowGraph(max_distance=max_distance, edge_distance=proprio_distance_thr) + self._supervision_graph = DistanceWindowGraph(max_distance=max_distance, edge_distance=supervision_distance_thr) # Experience graph if mode == 
WVNMode.EXTRACT_LABELS: @@ -78,29 +64,12 @@ def __init__( # Visualization node self._vis_mission_node = None - # Feature extractor - self._segmentation_type = segmentation_type - self._feature_type = feature_type - - self._feature_extractor = FeatureExtractor( - self._device, - segmentation_type=self._segmentation_type, - feature_type=self._feature_type, - input_size=image_size, - **kwargs, - ) - # Optical flow - self._optical_flow_estimator_type = optical_flow_estimator_type - - if optical_flow_estimator_type == "sparse": - self._optical_flow_estimator = KLTTrackerOpenCV(device=self._device) - # Mutex self._learning_lock = Lock() self._pause_training = False self._pause_mission_graph = False - self._pause_proprio_graph = False + self._pause_supervision_graph = False # Visualization self._visualizer = LearningVisualizer() @@ -108,21 +77,30 @@ def __init__( # Lightning module seed_everything(42) - self._exp_cfg = dataclasses.asdict(self._params) - self._model = get_model(self._exp_cfg["model"]).to(self._device) + self._model = get_model(self._params.model).to(self._device) self._model.train() - self._traversability_loss = TraversabilityLoss( - **self._exp_cfg["loss"], - model=self._model, - log_enabled=self._exp_cfg["general"]["log_confidence"], - log_folder=self._exp_cfg["general"]["model_path"], - ) - self._traversability_loss.to(self._device) + if self._anomaly_detection: + self._traversability_loss = AnomalyLoss( + **self._params["loss_anomaly"], + log_enabled=self._params["general"]["log_confidence"], + log_folder=self._params["general"]["model_path"], + ) + self._traversability_loss.to(self._device) + + else: + self._traversability_loss = TraversabilityLoss( + **self._params["loss"], + model=self._model, + log_enabled=self._params["general"]["log_confidence"], + log_folder=self._params["general"]["model_path"], + ) + self._traversability_loss.to(self._device) - self._optimizer = torch.optim.Adam(self._model.parameters(), lr=self._exp_cfg["optimizer"]["lr"]) + self._optimizer = torch.optim.Adam(self._model.parameters(), lr=self._params["optimizer"]["lr"]) self._loss = torch.tensor([torch.inf]) self._step = 0 + self._debug_info_node_count = 0 torch.set_grad_enabled(True) @@ -141,48 +119,6 @@ def __setstate__(self, state: dict): def reset(self): print("[WARNING] Resetting the traversability estimator is not fully tested") - # with self._learning_lock: - # self._pause_training = True - # self._pause_mission_graph = True - # self._pause_proprio_graph = True - # time.sleep(2.0) - - # self._proprio_graph.clear() - # self._mission_graph.clear() - - # # Reset all the learning stuff - # self._step = 0 - # self._loss = torch.tensor([torch.inf]) - - # # Re-create model - # self._exp_cfg = dataclasses.asdict(self._params) - # self._model = get_model(self._exp_cfg["model"]).to(self._device) - # self._model.train() - - # # Re-create optimizer - # self._optimizer = torch.optim.Adam(self._model.parameters(), lr=self._exp_cfg["optimizer"]["lr"]) - - # # Re-create traversability loss - # self._traversability_loss = TraversabilityLoss( - # **self._exp_cfg["loss"], - # model=self._model, - # log_enabled=self._exp_cfg["general"]["log_confidence"], - # log_folder=self._exp_cfg["general"]["model_path"], - # ) - # self._traversability_loss.to(self._device) - - # # Resume training - # self._pause_training = False - # self._pause_mission_graph = False - # self._pause_proprio_graph = False - - @property - def scale_traversability_threshold(self): - return self._scale_traversability_threshold - - 
@scale_traversability_threshold.setter - def scale_traversability_threshold(self, scale_traversability_threshold): - self._scale_traversability_threshold = scale_traversability_threshold @property def loss(self): @@ -207,44 +143,12 @@ def change_device(self, device: str): Args: device (str): new device """ - self._proprio_graph.change_device(device) + self._supervision_graph.change_device(device) self._mission_graph.change_device(device) - self._feature_extractor.change_device(device) self._model = self._model.to(device) - if self._optical_flow_estimator_type != "none": - self._optical_flow_estimator = self._optical_flow_estimator.to(device) - - if self._scale_traversability: - # Use 500 bins for constant memory usuage - self._auxiliary_training_roc.to(device) - - @accumulate_time - def update_features(self, node: MissionNode): - """Extracts visual features from a node that stores an image - Args: - node (MissionNode): new node in the mission graph - """ - if self._mode != WVNMode.EXTRACT_LABELS: - # Extract features - # Check do we need to add here the .clone() in - edges, feat, seg, center = self._feature_extractor.extract(img=node.image[None], return_centers=True) - - # Set features in node - node.feature_type = self._feature_extractor.feature_type - node.features = feat - node.feature_edges = edges - node.feature_segments = seg - node.feature_positions = center - - @accumulate_time - def update_prediction(self, node: MissionNode): - data = Data(x=node.features, edge_index=node.feature_edges) - with torch.inference_mode(): - with self._learning_lock: - node.prediction = self._model(data) - reco_loss = F.mse_loss(node.prediction[:, 1:], node.features, reduction="none").mean(dim=1) - node.confidence = self._traversability_loss._confidence_generator.inference_without_update(reco_loss) + if self._use_feature_extractor: + self._feature_extractor.change_device(device) @accumulate_time def update_visualization_node(self): @@ -258,116 +162,6 @@ def update_visualization_node(self): self._vis_mission_node = self._mission_graph.get_nodes()[-self._vis_node_index] - @accumulate_time - def add_correspondences_dense_optical_flow(self, node: MissionNode, previous_node: MissionNode, debug=False): - flow_previous_to_current = self._optical_flow_estimator.forward(previous_node.image.clone(), node.image.clone()) - grid_x, grid_y = torch.meshgrid( - torch.arange(0, previous_node.image.shape[1], device=self._device, dtype=torch.float32), - torch.arange(0, previous_node.image.shape[2], device=self._device, dtype=torch.float32), - indexing="ij", - ) - positions = torch.stack([grid_x, grid_y]) - positions += flow_previous_to_current - positions = positions.type(torch.long) - start_seg = torch.unique(previous_node.feature_segments) - - previous = [] - current = [] - for el in start_seg: - m = previous_node.feature_segments == el - candidates = positions[:, m] - m = ( - (candidates[0, :] >= 0) - * (candidates[0, :] < m.shape[0]) - * (candidates[1, :] >= 0) - * (candidates[1, :] < m.shape[1]) - ) - if m.sum() == 0: - continue - candidates = candidates[:, m] - res = node.feature_segments[candidates[0, :], candidates[1, :]] - previous.append(el) - current.append(torch.unique(res, sorted=True)[0]) - - if len(current) != 0: - current = torch.stack(current) - previous = torch.stack(previous) - correspondence = torch.stack([previous, current], dim=1) - node.correspondence = correspondence - - if debug: - from wild_visual_navigation import WVN_ROOT_DIR - from wild_visual_navigation.visu import LearningVisualizer - - visu 
= LearningVisualizer(p_visu=os.path.join(WVN_ROOT_DIR, "results/test_visu"), store=True) - visu.plot_correspondence_segment( - seg_prev=previous_node.feature_segments, - seg_current=node.feature_segments, - img_prev=previous_node.image, - img_current=node.image, - center_prev=previous_node.feature_positions, - center_current=node.feature_positions, - correspondence=node.correspondence, - tag="centers", - ) - - visu.plot_optical_flow( - flow=flow_previous_to_current, img1=previous_node.image, img2=node.image, tag="flow", s=50 - ) - - @accumulate_time - def add_correspondences_sparse_optical_flow(self, node: MissionNode, previous_node: MissionNode, debug=False): - # Transform previous_nodes feature locations into current image using KLT - pre_pos = previous_node.feature_positions - cur_pos = self._optical_flow_estimator( - previous_node.feature_positions[:, 0].clone(), - previous_node.feature_positions[:, 1].clone(), - previous_node.image, - node.image, - ) - cur_pos = torch.stack(cur_pos, dim=1) - # Use forward propagated cluster centers to get segment index of current image - - # Only compute for forward propagated centers that fall onto current image plane - m = ( - (cur_pos[:, 0] >= 0) - * (cur_pos[:, 1] >= 0) - * (cur_pos[:, 0] < node.image.shape[1]) - * (cur_pos[:, 1] < node.image.shape[2]) - ) - - # Can enumerate previous segments and use mask to get segment index - cor_pre = torch.arange(previous_node.feature_positions.shape[0], device=self._device)[m] - # cor_pre = pre[m] - - # Check feature_segmentation mask to index correct segment index - cur_pos = cur_pos[m].type(torch.long) - cor_cur = node.feature_segments[cur_pos[:, 1], cur_pos[:, 0]] - node.correspondence = torch.stack([cor_pre, cor_cur], dim=1) - - if debug: - from wild_visual_navigation import WVN_ROOT_DIR - from wild_visual_navigation.visu import LearningVisualizer - - visu = LearningVisualizer(p_visu=os.path.join(WVN_ROOT_DIR, "results/test_visu"), store=True) - visu.plot_sparse_optical_flow( - pre_pos, - cur_pos, - img1=previous_node.image, - img2=node.image, - tag="flow", - ) - visu.plot_correspondence_segment( - seg_prev=previous_node.feature_segments, - seg_current=node.feature_segments, - img_prev=previous_node.image, - img_current=node.image, - center_prev=previous_node.feature_positions, - center_current=node.feature_positions, - correspondence=node.correspondence, - tag="centers", - ) - @accumulate_time def add_mission_node(self, node: MissionNode, verbose: bool = False): """Adds a node to the mission graph to images and training info @@ -379,12 +173,6 @@ def add_mission_node(self, node: MissionNode, verbose: bool = False): if self._pause_mission_graph: return False - # Compute image features - self.update_features(node) - - # Get last added node - previous_node = self._mission_graph.get_last_node() - # Add image node success = self._mission_graph.add_node(node) @@ -395,49 +183,36 @@ def add_mission_node(self, node: MissionNode, verbose: bool = False): s += " " * (48 - len(s)) + f"total nodes [{total_nodes}]" if verbose: print(s) - - if self._mode != WVNMode.EXTRACT_LABELS: - # Set optical flow - if self._optical_flow_estimator_type == "dense" and previous_node is not None: - raise Exception("Not working") - self.add_correspondences_dense_optical_flow(node, previous_node, debug=False) - - elif self._optical_flow_estimator_type == "sparse" and previous_node is not None: - self.add_correspondences_sparse_optical_flow(node, previous_node, debug=False) - + h, w = node._feature_segments.shape[0], 
node._feature_segments.shape[1] # Project past footprints on current image - supervision_mask = torch.ones(node.image.shape).to(self._device) * torch.nan + supervision_mask = torch.ones((3, h, w)).to(self._device) * torch.nan # Finally overwrite the current mask node.supervision_mask = supervision_mask node.update_supervision_signal() - if self._mode == WVNMode.EXTRACT_LABELS: - p = os.path.join(self._extraction_store_folder, "image", str(node.timestamp).replace(".", "_") + ".pt") - torch.save(node.image, p) - return True - else: - return False + + return False @accumulate_time @torch.no_grad() - def add_proprio_node(self, pnode: ProprioceptionNode): - """Adds a node to the proprioceptive graph to store proprioception + def add_supervision_node(self, pnode: SupervisionNode): + """Adds a node to the supervision graph to store supervision Args: - node (BaseNode): new node in the proprioceptive graph + node (BaseNode): new node in the supervision graph """ - if self._pause_proprio_graph: + if self._pause_supervision_graph: return False # If the node is not valid, we do nothing if not pnode.is_valid(): return False - # Get last added proprio node - last_pnode = self._proprio_graph.get_last_node() - success = self._proprio_graph.add_node(pnode) + # Get last added supervision node + last_pnode = self._supervision_graph.get_last_node() + success = self._supervision_graph.add_node(pnode) if not success: # Update traversability of latest node @@ -446,6 +221,7 @@ def add_proprio_node(self, pnode: ProprioceptionNode): return False else: + # If the previous node doesn't exist or it's invalid, we do nothing if last_pnode is None or not last_pnode.is_valid(): return False @@ -460,9 +236,19 @@ def add_proprio_node(self, pnode: ProprioceptionNode): if (not hasattr(last_mission_node, "supervision_mask")) or (last_mission_node.supervision_mask is None): return False + for j, ele in enumerate( + list(self._mission_graph._graph.nodes._nodes.items())[self._debug_info_node_count :] + ): + node, values = ele + if last_mission_node.timestamp - values["timestamp"] > 30: + node.clear_debug_data() + self._debug_info_node_count += 1 + else: + break + # Get all mission nodes within a range mission_nodes = self._mission_graph.get_nodes_within_radius_range( - last_mission_node, 0, self._proprio_graph.max_distance + last_mission_node, 0, self._supervision_graph.max_distance ) if len(mission_nodes) < 1: @@ -473,7 +259,6 @@ def add_proprio_node(self, pnode: ProprioceptionNode): # New implementation B = len(mission_nodes) - # Prepare batches K = torch.eye(4, device=self._device).repeat(B, 1, 1) supervision_masks = torch.zeros(last_mission_node.supervision_mask.shape, device=self._device).repeat( @@ -488,9 +273,7 @@ def add_proprio_node(self, pnode: ProprioceptionNode): K[i] = mnode.image_projector.camera.intrinsics pose_camera_in_world[i] = mnode.pose_cam_in_world - if (not hasattr(mnode, "supervision_mask")) or (mnode.supervision_mask is None): - continue - else: + if not ((not hasattr(mnode, "supervision_mask")) or (mnode.supervision_mask is None)): supervision_masks[i] = mnode.supervision_mask im = ImageProjector(K, H, W) @@ -519,8 +302,8 @@ def add_proprio_node(self, pnode: ProprioceptionNode): def get_mission_nodes(self): return self._mission_graph.get_nodes() - def get_proprio_nodes(self): - return self._proprio_graph.get_nodes() + def get_supervision_nodes(self): + return self._supervision_graph.get_nodes() def get_last_valid_mission_node(self): last_valid_node = None @@ -530,10 +313,6 @@ def 
get_last_valid_mission_node(self): return last_valid_node def get_mission_node_for_visualization(self): - # print(f"get_mission_node_for_visualization: {self._vis_mission_node}") - # if self._vis_mission_node is not None: - # print(f" has image {hasattr(self._vis_mission_node, 'image')}") - # print(f" has supervision_mask {hasattr(self._vis_mission_node, 'supervision_mask')}") return self._vis_mission_node def save(self, mission_path: str, filename: str): @@ -586,7 +365,12 @@ def save_graph(self, mission_path: str, export_debug: bool = False): i = 0 for node in mission_nodes: if node.is_valid(): - node.save(mission_path, i, graph_only=False, previous_node=self._mission_graph.get_previous_node(node)) + node.save( + mission_path, + i, + graph_only=False, + previous_node=self._mission_graph.get_previous_node(node), + ) i += 1 self._pause_training = False @@ -645,39 +429,19 @@ def load_checkpoint(self, checkpoint_path: str): self._pause_training = False @accumulate_time - def make_batch(self, batch_size: int = 8): + def make_batch( + self, + batch_size: int = 8, + ): """Samples a batch from the mission_graph Args: batch_size (int): Size of the batch """ - if self._optical_flow_estimator_type != "none": - raise ValueError( - "Currently not supported the auxilary graph implementation is not needed and the features and edge index of previous node should be added to the mission graph directly." - ) - # Sample a batch of nodes and their previous node, for temporal consistency - mission_nodes = self._mission_graph.get_n_random_valid_nodes(n=batch_size) - ls = [ - x.as_pyg_data(self._mission_graph.get_previous_node(x)) - for x in mission_nodes - if x.correspondence is not None - ] - - ls_aux = [self._mission_graph.get_previous_node(x).as_pyg_data(aux=True) for x in mission_nodes] - - # Make sure to only use nodes with valid correspondences - while len(ls) < batch_size: - mn = self._mission_graph.get_n_random_valid_nodes(n=1)[0] - if mn.correspondence is not None: - ls.append(mn.as_pyg_data(self._mission_graph.get_previous_node(mn))) - ls_aux.append(self._mission_graph.get_previous_node(mn).as_pyg_data(aux=True)) - - batch = Batch.from_data_list(ls) - else: - # Just sample N random nodes - mission_nodes = self._mission_graph.get_n_random_valid_nodes(n=batch_size) - batch = Batch.from_data_list([x.as_pyg_data() for x in mission_nodes]) + # Just sample N random nodes + mission_nodes = self._mission_graph.get_n_random_valid_nodes(n=batch_size) + batch = Batch.from_data_list([x.as_pyg_data(anomaly_detection=self._anomaly_detection) for x in mission_nodes]) return batch @@ -691,51 +455,46 @@ def train(self): if self._pause_training: return {} - return_dict = {} - if self._mission_graph.get_num_valid_nodes() > self._min_samples_for_training: + num_valid_nodes = self._mission_graph.get_num_valid_nodes() + return_dict = {"mission_graph_num_valid_node": num_valid_nodes} + if num_valid_nodes > self._min_samples_for_training: # Prepare new batch - graph = self.make_batch(self._exp_cfg["ablation_data_module"]["batch_size"]) - - with self._learning_lock: - # Forward pass - res = self._model(graph) - - log_step = (self._step % 20) == 0 - self._loss, loss_aux = self._traversability_loss(graph, res, step=self._step, log_step=log_step) - - # Keep track of ROC during training for rescaling the loss when publishing - if self._scale_traversability: - # This mask should contain all the segments corrosponding to trees. 
- mask_anomaly = loss_aux["confidence"] < 0.5 - mask_proprioceptive = graph.y == 1 - # Remove the segments that are for sure not an anomalies given that we have walked on them. - mask_anomaly[mask_proprioceptive] = False - # Elements are valid if they are either an anomaly or we have walked on them to fit the ROC - mask_valid = mask_anomaly | mask_proprioceptive - self._auxiliary_training_roc(res[mask_valid, 0], graph.y[mask_valid].type(torch.long)) - return_dict["scale_traversability_threshold"] = self._scale_traversability_threshold - # Backprop - self._optimizer.zero_grad() - self._loss.backward() - self._optimizer.step() - - # Print losses - if log_step: - loss_trav = loss_aux["loss_trav"] - loss_reco = loss_aux["loss_reco"] - print( - f"step: {self._step} | loss: {self._loss:5f} | loss_trav: {loss_trav:5f} | loss_reco: {loss_reco:5f}" - ) + graph = self.make_batch(self._params["ablation_data_module"]["batch_size"]) + if graph is not None: + with self._learning_lock: + # Forward pass + + res = self._model(graph) + + log_step = (self._step % 20) == 0 + self._loss, loss_aux, trav = self._traversability_loss( + graph, res, step=self._step, log_step=log_step + ) + + # Backprop + self._optimizer.zero_grad() + self._loss.backward() + self._optimizer.step() + + # Print losses + if log_step: + loss_trav = loss_aux["loss_trav"] + loss_reco = loss_aux["loss_reco"] + print( + f"step: {self._step} | loss: {self._loss.item():5f} | loss_trav: {loss_trav.item():5f} | loss_reco: {loss_reco.item():5f}" + ) + + # Update steps + self._step += 1 - # Update steps - self._step += 1 + # Return loss + return_dict["loss_total"] = self._loss.item() + return_dict["loss_trav"] = loss_aux["loss_trav"].item() + return_dict["loss_reco"] = loss_aux["loss_reco"].item() - # Return loss - return_dict["loss_total"] = self._loss.item() - return_dict["loss_trav"] = loss_aux["loss_trav"].item() - return_dict["loss_reco"] = loss_aux["loss_reco"].item() - return return_dict - return {"loss_total": -1} + return return_dict + return_dict["loss_total"] = -1 + return return_dict @accumulate_time def plot_mission_node_prediction(self, node: MissionNode): diff --git a/wild_visual_navigation/utils/__init__.py b/wild_visual_navigation/utils/__init__.py index a60f90ca..b44f8835 100644 --- a/wild_visual_navigation/utils/__init__.py +++ b/wild_visual_navigation/utils/__init__.py @@ -1,9 +1,30 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# +from .data import Data, Batch +from .flatten_dict import flatten_dict +from .get_logger import get_logger, get_neptune_run +from .loading import load_yaml, file_path, save_omega_cfg +from .create_experiment_folder import create_experiment_folder +from .get_confidence import get_confidence from .kalman_filter import KalmanFilter -from .meshes import make_box, make_rounded_box, make_ellipsoid, make_plane, make_polygon_from_points, make_dense_plane -from .timing import Timer, accumulate_time, SystemLevelTimer, SystemLevelContextTimer -from .klt_tracker import KLTTracker, KLTTrackerOpenCV from .confidence_generator import ConfidenceGenerator +from .meshes import ( + make_box, + make_rounded_box, + make_ellipsoid, + make_plane, + make_polygon_from_points, + make_dense_plane, +) from .operation_modes import WVNMode -from .dataset_info import perugia_dataset, ROOT_DIR -from .override_params import override_params -from .gpu_monitor import GpuMonitor, SystemLevelGpuMonitor, SystemLevelContextGpuMonitor, accumulate_memory +from .gpu_monitor import ( + GpuMonitor, + SystemLevelGpuMonitor, + SystemLevelContextGpuMonitor, + accumulate_memory, +) +from .loss import TraversabilityLoss, AnomalyLoss +from .testing import load_test_image, get_dino_transform, make_results_folder diff --git a/wild_visual_navigation/utils/confidence_generator.py b/wild_visual_navigation/utils/confidence_generator.py index ec6bc69a..1d920c5b 100644 --- a/wild_visual_navigation/utils/confidence_generator.py +++ b/wild_visual_navigation/utils/confidence_generator.py @@ -1,21 +1,27 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from wild_visual_navigation.utils import KalmanFilter +from wild_visual_navigation import WVN_ROOT_DIR import torch import os +from collections import deque class ConfidenceGenerator(torch.nn.Module): def __init__( self, - std_factor: float = 0.7, - method: str = "running_mean", + std_factor, + method, log_enabled: bool = False, - log_folder: str = "/tmp", + log_folder: str = f"{WVN_ROOT_DIR}/results", ): """Returns a confidence value for each number Args: - std_factor (float, optional): _description_. Defaults to 2.0. - device (str, optional): _description_. Defaults to "cpu". + std_factor (float, optional): _description_. Defaults to 0.7. 
""" super(ConfidenceGenerator, self).__init__() self.std_factor = std_factor @@ -51,21 +57,36 @@ def __init__( self.running_n = torch.nn.Parameter(running_n, requires_grad=False) self.running_sum = torch.nn.Parameter(running_sum, requires_grad=False) self.running_sum_of_squares = torch.nn.Parameter(running_sum_of_squares, requires_grad=False) + + # self.running_n = running_n.to("cuda") + # self.running_sum = running_sum.to("cuda") + # self.running_sum_of_squares = running_sum_of_squares.to("cuda") + self._update = self.update_running_mean self._reset = self.reset_running_mean - elif method == "latest_measurment": - self._update = self.update_latest_measurment - self._reset = self.reset_latest_measurment + elif method == "latest_measurement": + self._update = self.update_latest_measurement + self._reset = self.reset_latest_measurement + elif method == "moving_average": + window_size = 5 + self.data_window = deque(maxlen=window_size) + self._update = self.update_moving_average + self._reset = self.reset_moving_average else: raise ValueError("Unknown method") - def update_latest_measurment(self, x: torch.Tensor, x_positive: torch.Tensor): + def update_latest_measurement(self, x: torch.Tensor, x_positive: torch.Tensor): # Then the confidence is computed as the distance to the center of the Gaussian given factor*sigma self.mean[0] = x_positive.mean() self.std[0] = x_positive.std() return self.inference_without_update(x) - def reset_latest_measurment(self, x: torch.Tensor, x_positive: torch.Tensor): + def reset_latest_measurement(self, x: torch.Tensor, x_positive: torch.Tensor): + self.mean[0] = 0 + self.var[0] = 1 + self.std[0] = 1 + + def reset_moving_average(self, x: torch.Tensor, x_positive: torch.Tensor): self.mean[0] = 0 self.var[0] = 1 self.std[0] = 1 @@ -81,9 +102,29 @@ def update_running_mean(self, x: torch.tensor, x_positive: torch.tensor): self.var[0] = self.running_sum_of_squares / self.running_n - self.mean**2 self.std[0] = torch.sqrt(self.var) - # Then the confidence is computed as the distance to the center of the Gaussian given factor*sigma - confidence = torch.exp(-(((x - self.mean) / (self.std * self.std_factor)) ** 2) * 0.5) - confidence[x < self.mean] = 1.0 + if x.device != self.mean.device: + return torch.zeros_like(x) + + shifted_mean = self.mean + self.std * self.std_factor + std_fac = 1 + interval_min = max(shifted_mean - std_fac * self.std, 0) + interval_max = shifted_mean + std_fac * self.std + x = torch.clip(x, interval_min, interval_max) + confidence = 1 - ((x - interval_min) / (interval_max - interval_min)) + + return confidence.type(torch.float32) + + def update_moving_average(self, x: torch.tensor, x_positive: torch.tensor): + self.data_window.append(x_positive) + + data_window_tensor = list(self.data_window) + data_window_tensor = torch.cat(data_window_tensor, dim=0) + + self.mean[0] = data_window_tensor.mean() + self.std[0] = data_window_tensor.std() + + x = torch.clip(x, self.mean - 2 * self.std, self.mean + 2 * self.std) + confidence = (x - torch.min(x)) / (torch.max(x) - torch.min(x)) return confidence.type(torch.float32) @@ -94,7 +135,7 @@ def update_kalman_filter(self, x: torch.tensor, x_positive: torch.tensor): self.var[0, 0] = var[0, 0] self.mean[0] = mean[0] - assert torch.isnan(self.mean).any() == False, "Nan Value in mean detected" + assert not torch.isnan(self.mean).any(), "Nan Value in mean detected" self.std[0] = torch.sqrt(self.var)[0, 0] # Then the confidence is computed as the distance to the center of the Gaussian given factor*sigma @@ -103,7 
+144,13 @@ def update_kalman_filter(self, x: torch.tensor, x_positive: torch.tensor): return confidence.type(torch.float32) - def update(self, x: torch.tensor, x_positive: torch.tensor, step: int, log_step: bool = False): + def update( + self, + x: torch.tensor, + x_positive: torch.tensor, + step: int, + log_step: bool = False, + ): """Input a tensor with multiple error predictions. Returns the estimated confidence score within 2 standard deviations based on the running mean and variance. @@ -121,17 +168,28 @@ def update(self, x: torch.tensor, x_positive: torch.tensor, step: int, log_step: with torch.no_grad(): torch.save( - {"x": x.cpu(), "x_positive": x_positive.cpu(), "mean": self.mean.cpu(), "std": self.std.cpu()}, + { + "x": x.cpu(), + "x_positive": x_positive.cpu(), + "mean": self.mean.cpu(), + "std": self.std.cpu(), + }, os.path.join(base_folder, f"samples_{step:06}.pt"), ) return output def inference_without_update(self, x: torch.tensor): + if x.device != self.mean.device: return torch.zeros_like(x) + shifted_mean = self.mean + self.std * self.std_factor + std_fac = 1 + interval_min = max(shifted_mean - std_fac * self.std, 0) + interval_max = shifted_mean + std_fac * self.std + x = torch.clip(x, interval_min, interval_max) + confidence = 1 - ((x - interval_min) / (interval_max - interval_min)) - confidence = torch.exp(-(((x - self.mean) / (self.std * self.std_factor)) ** 2) * 0.5) return confidence.type(torch.float32) def forward(self, x: torch.tensor): @@ -150,15 +208,13 @@ def reset_kalman_filter(self): self.var[0] = 1 self.std[0] = 1 + def get_dict(self): + return {"mean": self.mean, "var": self.var, "std": self.std} + if __name__ == "__main__": - cg = ConfidenceGenerator() - for i in range(100000): - inp = ( - torch.rand( - 10, - ) - * 10 - ) - res = cg.update(inp, inp) - print("inp ", inp, " res ", res, "std", cg.std) + cg = ConfidenceGenerator(std_factor=0.5, method="latest_measurement") + for i in range(1000): + inp = torch.rand(10) * 10 + res = cg.update(inp, inp, step=i) + # print("inp ", inp, " res ", res, "std", cg.std) diff --git a/wild_visual_navigation/learning/utils/create_experiment_folder.py b/wild_visual_navigation/utils/create_experiment_folder.py similarity index 53% rename from wild_visual_navigation/learning/utils/create_experiment_folder.py rename to wild_visual_navigation/utils/create_experiment_folder.py index 64f0b70f..a879fd8b 100644 --- a/wild_visual_navigation/learning/utils/create_experiment_folder.py +++ b/wild_visual_navigation/utils/create_experiment_folder.py @@ -1,47 +1,48 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# import datetime import os import shutil from pathlib import Path from pytorch_lightning.utilities import rank_zero_only from wild_visual_navigation.cfg import ExperimentParams -from typing import Union +from omegaconf import read_write +from wild_visual_navigation import WVN_ROOT_DIR @rank_zero_only -def create_experiment_folder(exp: Union[dict, ExperimentParams], env: dict) -> str: +def create_experiment_folder(exp: ExperimentParams) -> str: """Creates an experiment folder if rank=0 with optional unique timestamp. Inplace sets the correct model_path in the experiment cfg. 
+ Updateds the results path correctly Args: - exp (dict or ExperimentParams): Experiment cfg - env (dict): Environment cfg + exp (ExperimentParams): Experiment cfg Returns: str: model_path of the run """ - if isinstance(exp, ExperimentParams): - name = exp.general.name - timestamp = exp.general.timestamp - else: - name = exp["general"]["name"] - timestamp = exp["general"]["timestamp"] + name = exp.general.name + timestamp = exp.general.timestamp + + if not os.path.isabs(exp.env.results): + with read_write(exp): + exp.env.results = os.path.join(WVN_ROOT_DIR, exp.env.results) # Set in name the correct model path if timestamp: timestamp = datetime.datetime.now().replace(microsecond=0).isoformat() - model_path = os.path.join(env["base"], name) + model_path = os.path.join(exp.env.results, name) p = model_path.split("/") model_path = os.path.join("/", *p[:-1], str(timestamp).replace(":", "-") + "_" + p[-1]) else: - model_path = os.path.join(env["base"], name) + model_path = os.path.join(exp.env.results, name) shutil.rmtree(model_path, ignore_errors=True) # Create the directory Path(model_path).mkdir(parents=True, exist_ok=True) - if isinstance(exp, ExperimentParams): - exp.general.model_path = model_path - else: - exp["general"]["model_path"] = model_path - return model_path diff --git a/wild_visual_navigation/utils/data.py b/wild_visual_navigation/utils/data.py new file mode 100644 index 00000000..a6aa28af --- /dev/null +++ b/wild_visual_navigation/utils/data.py @@ -0,0 +1,73 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from typing import List +from typing_extensions import Self +import torch + + +class Data: + def __init__(self, **kwargs): + for k, v in kwargs.items(): + setattr(self, k, v) + + +class Batch: + def __init__(self): + pass + + @classmethod + def from_data_list(cls, list_of_data: List[Data]) -> Self: + if len(list_of_data) == 0: + return None + + base = ["x"] + + tensors_to_concatenate = [ + k for k in dir(list_of_data[0]) if k[0] != "_" and getattr(list_of_data[0], k) is not None and not k in base + ] + base = base + tensors_to_concatenate + + for k in base: + if k == "edge_index": + ls = [] + for j, data in enumerate(list_of_data): + ls.append(getattr(data, k) + cls.ptr[j]) + + cls.edge_index = torch.cat(ls, dim=-1) + else: + + if k == "x": + running = 0 + ptrs = [running] + batches = [] + + for j, data in enumerate(list_of_data): + running = running + getattr(data, k).shape[0] + ptrs.append(running) + batches += [j] * int(getattr(data, k).shape[0]) + + cls.ptr = torch.tensor(ptrs, dtype=torch.long) + cls.batch = torch.tensor(batches, dtype=torch.long) + + setattr(cls, k, torch.cat([getattr(data, k) for data in list_of_data], dim=0)) + + cls.ba = cls.x.shape[0] + return cls + + +if __name__ == "__main__": + from torch_geometric.data import Data as DataTorchGeometric + from torch_geometric.data import Batch as BatchTorchGeometric + + edge_index = torch.tensor([[0, 1, 1, 2], [1, 0, 2, 1]], dtype=torch.long) + x = torch.tensor([[-1], [0], [1]], dtype=torch.float) + data_tg1 = DataTorchGeometric(x=x, edge_index=edge_index) + data1 = Data(x=x, edge_index=edge_index) + edge_index2 = torch.tensor([[0, 1, 1, 2], [1, 0, 2, 1]], dtype=torch.long) + x2 = torch.tensor([[-1], [12], [1]], dtype=torch.float) + data_tg2 = DataTorchGeometric(x=x2, edge_index=edge_index2) + data2 = Data(x=x2, edge_index=edge_index2) + batch = 
BatchTorchGeometric.from_data_list([data_tg1, data_tg2]) diff --git a/wild_visual_navigation/learning/utils/flatten_dict.py b/wild_visual_navigation/utils/flatten_dict.py similarity index 82% rename from wild_visual_navigation/learning/utils/flatten_dict.py rename to wild_visual_navigation/utils/flatten_dict.py index 0446b895..6e788635 100644 --- a/wild_visual_navigation/learning/utils/flatten_dict.py +++ b/wild_visual_navigation/utils/flatten_dict.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# import collections __all__ = ["flatten_dict"] diff --git a/wild_visual_navigation/learning/utils/get_confidence.py b/wild_visual_navigation/utils/get_confidence.py similarity index 57% rename from wild_visual_navigation/learning/utils/get_confidence.py rename to wild_visual_navigation/utils/get_confidence.py index 3594aa7c..dee10778 100644 --- a/wild_visual_navigation/learning/utils/get_confidence.py +++ b/wild_visual_navigation/utils/get_confidence.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# import torch.nn.functional as F import torch diff --git a/wild_visual_navigation/utils/get_logger.py b/wild_visual_navigation/utils/get_logger.py new file mode 100644 index 00000000..c1154622 --- /dev/null +++ b/wild_visual_navigation/utils/get_logger.py @@ -0,0 +1,127 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from pytorch_lightning.loggers.neptune import NeptuneLogger +from pytorch_lightning.loggers import TensorBoardLogger, WandbLogger +from wild_visual_navigation.utils import flatten_dict +import inspect +import os +import neptune + +__all__ = [ + "get_neptune_logger", + "get_wandb_logger", + "get_tensorboard_logger", + "get_neptune_run", +] + +PROXIES = {"http": "http://proxy.ethz.ch:3128", "https": "http://proxy.ethz.ch:3128"} + + +def get_neptune_run(neptune_project_name: str, tags: [str]) -> any: + """Get neptune run + + Args: + neptune_project_name (str): Neptune project name + tags (list of str): Tags to identify the project + """ + proxies = None + if os.environ.get("ENV_WORKSTATION_NAME", "default") == "euler": + proxies = PROXIES + + run = neptune.init( + api_token=os.environ["NEPTUNE_API_TOKEN"], + project=neptune_project_name, + tags=[os.environ.get("ENV_WORKSTATION_NAME", "default")] + tags, + proxies=proxies, + ) + return run + + +def get_neptune_logger(exp: dict) -> NeptuneLogger: + """Returns NeptuneLogger + + Args: + exp (dict): Content of environment file + Returns: + (logger): Logger + """ + project_name = exp.logger.neptune_project_name # Neptune AI project_name "username/project" + + params = flatten_dict(exp) # noqa: F841 + + name_full = exp.general.name + name_short = "__".join(name_full.split("/")[-2:]) + + proxies = None + if os.environ.get("ENV_WORKSTATION_NAME", "default") == "euler": + proxies = PROXIES + + return NeptuneLogger( + api_key=os.environ["NEPTUNE_API_TOKEN"], + project=project_name, + name=name_short, + tags=[ + os.environ.get("ENV_WORKSTATION_NAME", "default"), + name_full.split("/")[-2], + name_full.split("/")[-1], + ], + proxies=proxies, + ) + + +def get_wandb_logger(exp: dict) -> WandbLogger: + """Returns 
NeptuneLogger + + Args: + exp (dict): Content of environment file + + Returns: + (logger): Logger + """ + project_name = exp.logger.wandb_project_name # project_name (str): W&B project_name + save_dir = os.path.join(exp.general.model_path) # save_dir (str): File path to save directory + params = flatten_dict(exp) # noqa: F841 + name_full = exp.general.name + name_short = "__".join(name_full.split("/")[-2:]) + return WandbLogger( + name=name_short, + project=project_name, + entity=exp.logger.wandb_entity, + save_dir=save_dir, + offline=False, + ) + + +def get_tensorboard_logger(exp: dict) -> TensorBoardLogger: + """Returns TensorboardLoggers + + Args: + exp (dict): Content of environment file + + Returns: + (logger): Logger + """ + params = flatten_dict(exp) + return TensorBoardLogger(save_dir=exp.name, name="tensorboard", default_hp_metric=params) + + +def get_skip_logger(exp: dict) -> None: + """Returns None + + Args: + exp (dict): Content of environment file + + Returns: + (logger): Logger + """ + return None + + +def get_logger(exp: dict) -> any: + name = exp.logger.name + save_dir = os.path.join(exp.env.folder, exp.general.name) # noqa: F841 + register = {k: v for k, v in globals().items() if inspect.isfunction(v)} + return register[f"get_{name}_logger"](exp) diff --git a/wild_visual_navigation/utils/gpu_monitor.py b/wild_visual_navigation/utils/gpu_monitor.py index c2c00031..a553349e 100644 --- a/wild_visual_navigation/utils/gpu_monitor.py +++ b/wild_visual_navigation/utils/gpu_monitor.py @@ -1,6 +1,10 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# import torch import os -import numpy as np import pandas as pd from functools import wraps @@ -27,7 +31,6 @@ def pynvml_gpu_memory_query(device, pid): def jtop_gpu_memory_query(device, pid): - from jtop import jtop import psutil process = psutil.Process(pid) @@ -181,7 +184,15 @@ def toc(self): class SystemLevelGpuMonitor: - def __init__(self, objects, names, enabled=True, device=None, store_samples=False, skip_n_samples=1) -> None: + def __init__( + self, + objects, + names, + enabled=True, + device=None, + store_samples=False, + skip_n_samples=1, + ) -> None: self.objects = objects self.names = names @@ -211,7 +222,7 @@ def store(self, folder): if hasattr(o, "slg_memory_samples"): # Each object has a slg_memory_samples dict # Each entry of the dict is a numpy array with samples - for (k, v) in o.slg_memory_samples.items(): + for k, v in o.slg_memory_samples.items(): out_filename = f"{base_folder}/{n}_{k}.csv" print(f" {out_filename}") df = pd.DataFrame(v) @@ -219,7 +230,6 @@ def store(self, folder): if __name__ == "__main__": - print("GPU memory measuring using context manager") tensors = [] @@ -263,7 +273,12 @@ def test2(self, s): t = 0.1 my_test = MyTest() gpu_monitor = SystemLevelGpuMonitor( - objects=[my_test], names=["test"], enabled=True, device="cuda", store_samples=True, skip_n_samples=1 + objects=[my_test], + names=["test"], + enabled=True, + device="cuda", + store_samples=True, + skip_n_samples=1, ) for n in range(400): step = n diff --git a/wild_visual_navigation/utils/kalman_filter.py b/wild_visual_navigation/utils/kalman_filter.py index fe049511..1486cb05 100644 --- a/wild_visual_navigation/utils/kalman_filter.py +++ b/wild_visual_navigation/utils/kalman_filter.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. 
Licensed under the MIT license. +# See LICENSE file in the project root for details. +# import torch import torch.nn as nn @@ -138,7 +143,13 @@ def run_kalman_filter(): kf1.init_meas_model(meas_model=torch.eye(1), meas_cov=torch.eye(1) * 2) # Outlier-robust KF - kf2 = KalmanFilter(dim_state=1, dim_control=1, dim_meas=1, outlier_rejection="huber", outlier_delta=0.5) + kf2 = KalmanFilter( + dim_state=1, + dim_control=1, + dim_meas=1, + outlier_rejection="huber", + outlier_delta=0.5, + ) kf2.init_process_model(proc_model=torch.eye(1) * 1, proc_cov=torch.eye(1) * 0.5) kf2.init_meas_model(meas_model=torch.eye(1), meas_cov=torch.eye(1) * 2) diff --git a/wild_visual_navigation/learning/utils/loading.py b/wild_visual_navigation/utils/loading.py similarity index 61% rename from wild_visual_navigation/learning/utils/loading.py rename to wild_visual_navigation/utils/loading.py index 7a94f4cf..e31da012 100644 --- a/wild_visual_navigation/learning/utils/loading.py +++ b/wild_visual_navigation/utils/loading.py @@ -1,6 +1,13 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +from wild_visual_navigation import WVN_ROOT_DIR + import os import yaml -from wild_visual_navigation import WVN_ROOT_DIR + __all__ = ["file_path", "load_yaml"] @@ -40,18 +47,11 @@ def load_yaml(path: str) -> dict: return res -def load_env() -> dict: - """Uses ENV_WORKSTATION_NAME variable to load specified environment yaml file. - - Returns: - (dict): Returns content of environment file +def save_omega_cfg(cfg, path): + """ + Args: + cfg (omegaconf): Cfg file + path (str): File path """ - env_cfg_path = os.path.join(WVN_ROOT_DIR, "cfg/env", os.environ["ENV_WORKSTATION_NAME"] + ".yaml") - env = load_yaml(env_cfg_path) - for k in env.keys(): - if k == "workstation": - continue - if not os.path.isabs(env[k]): - env[k] = os.path.join(WVN_ROOT_DIR, env[k]) - - return env + with open(path, "rb") as file: + OmegaConf.save(config=cfg, f=file) diff --git a/wild_visual_navigation/learning/utils/loss.py b/wild_visual_navigation/utils/loss.py similarity index 55% rename from wild_visual_navigation/learning/utils/loss.py rename to wild_visual_navigation/utils/loss.py index f68ce8d5..1e6343d9 100644 --- a/wild_visual_navigation/learning/utils/loss.py +++ b/wild_visual_navigation/utils/loss.py @@ -1,25 +1,72 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# +from wild_visual_navigation.utils import ConfidenceGenerator + import torch.nn.functional as F -from torch_geometric.data import Data +from wild_visual_navigation.utils import Data + import torch from typing import Optional -from wild_visual_navigation.utils import ConfidenceGenerator from torch import nn -from torchmetrics import ROC, AUROC, Accuracy + + +class AnomalyLoss(nn.Module): + def __init__( + self, + confidence_std_factor: float, + method: str, + log_enabled: bool, + log_folder: str, + ): + super(AnomalyLoss, self).__init__() + + self._confidence_generator = ConfidenceGenerator( + std_factor=confidence_std_factor, method=method, log_enabled=log_enabled, log_folder=log_folder + ) + + def forward( + self, + graph: Optional[Data], + res: dict, + update_generator: bool = True, + step: int = 0, + log_step: bool = False, + ): + loss_aux = {} + loss_aux["loss_trav"] = torch.tensor([0.0]) + loss_aux["loss_reco"] = torch.tensor([0.0]) + + losses = res["logprob"].sum(1) + res["log_det"] # Sum over all channels, resulting in h*w output dimensions + + if update_generator: + confidence = self._confidence_generator.update( + x=-losses.clone().detach(), x_positive=-losses.clone().detach(), step=step + ) + + loss_aux["confidence"] = confidence + + return -torch.mean(losses), loss_aux, confidence + + def update_node_confidence(self, node): + node.confidence = 0 class TraversabilityLoss(nn.Module): def __init__( self, - w_trav, - w_reco, - w_temp, - anomaly_balanced, - model, - method, - confidence_std_factor, + w_trav: float, + w_reco: float, + w_temp: float, + anomaly_balanced: bool, + model: nn.Module, + method: str, + confidence_std_factor: float, + log_enabled: bool, + log_folder: str, trav_cross_entropy=False, - log_enabled: bool = False, - log_folder: str = "/tmp", ): # TODO remove trav_cross_entropy default param when running in online mode super(TraversabilityLoss, self).__init__() @@ -36,10 +83,7 @@ def __init__( self._trav_loss_func = F.mse_loss self._confidence_generator = ConfidenceGenerator( - std_factor=confidence_std_factor, - method=method, - log_enabled=log_enabled, - log_folder=log_folder, + std_factor=confidence_std_factor, method=method, log_enabled=log_enabled, log_folder=log_folder ) def reset(self): @@ -47,7 +91,12 @@ def reset(self): self._confidence_generator.reset() def forward( - self, graph: Data, res: torch.Tensor, update_generator: bool = True, step: int = 0, log_step: bool = False + self, + graph: Data, + res: torch.Tensor, + update_generator: bool = True, + step: int = 0, + log_step: bool = False, ): # Compute reconstruction loss nr_channel_reco = graph.x.shape[1] @@ -56,7 +105,10 @@ def forward( with torch.no_grad(): if update_generator: confidence = self._confidence_generator.update( - x=loss_reco, x_positive=loss_reco[graph.y_valid], step=step, log_step=log_step + x=loss_reco, + x_positive=loss_reco[graph.y_valid], + step=step, + log_step=log_step, ) else: confidence = self._confidence_generator.inference_without_update(x=loss_reco) @@ -65,7 +117,9 @@ def forward( if self._trav_cross_entropy: label = label.type(torch.long) loss_trav_raw = self._trav_loss_func( - res[:, :-nr_channel_reco].squeeze()[:, 0], label.type(torch.float32), reduction="none" + res[:, :-nr_channel_reco].squeeze()[:, 0], + label.type(torch.float32), + reduction="none", ) else: loss_trav_raw = self._trav_loss_func(res[:, :-nr_channel_reco].squeeze(), label, reduction="none") @@ -104,3 +158,7 @@ def forward( }, res_updated, ) + + def update_node_confidence(self, node): + reco_loss = 
F.mse_loss(node.prediction[:, 1:], node.features, reduction="none").mean(dim=1) + node.confidence = self._confidence_generator.inference_without_update(reco_loss) diff --git a/wild_visual_navigation/utils/meshes.py b/wild_visual_navigation/utils/meshes.py index ad12512f..383e4596 100644 --- a/wild_visual_navigation/utils/meshes.py +++ b/wild_visual_navigation/utils/meshes.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# import torch from kornia.geometry.linalg import transform_points @@ -72,15 +77,30 @@ def make_ellipsoid(length, width, height, pose=torch.eye(4), grid_size=11): def make_plane(x=None, y=None, z=None, pose=torch.eye(4), grid_size=10): if x is None: points = torch.FloatTensor( - [[0.0, y / 2, z / 2], [0.0, -y / 2, z / 2], [0.0, -y / 2, -z / 2], [0.0, y / 2, -z / 2]] + [ + [0.0, y / 2, z / 2], + [0.0, -y / 2, z / 2], + [0.0, -y / 2, -z / 2], + [0.0, y / 2, -z / 2], + ] ).to(pose.device) elif y is None: points = torch.FloatTensor( - [[x / 2, 0.0, z / 2], [x / 2, 0.0, -z / 2], [-x / 2, 0.0, -z / 2], [-x / 2, 0.0, z / 2]] + [ + [x / 2, 0.0, z / 2], + [x / 2, 0.0, -z / 2], + [-x / 2, 0.0, -z / 2], + [-x / 2, 0.0, z / 2], + ] ).to(pose.device) elif z is None: points = torch.FloatTensor( - [[x / 2, y / 2, 0.0], [x / 2, -y / 2, 0.0], [-x / 2, -y / 2, 0.0], [-x / 2, y / 2, 0.0]] + [ + [x / 2, y / 2, 0.0], + [x / 2, -y / 2, 0.0], + [-x / 2, -y / 2, 0.0], + [-x / 2, y / 2, 0.0], + ] ).to(pose.device) else: raise "make_plane requires just 2 inputs to be set" diff --git a/wild_visual_navigation/utils/operation_modes.py b/wild_visual_navigation/utils/operation_modes.py index 776e4475..6fd761e3 100644 --- a/wild_visual_navigation/utils/operation_modes.py +++ b/wild_visual_navigation/utils/operation_modes.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from enum import Enum diff --git a/wild_visual_navigation/utils/override_params.py b/wild_visual_navigation/utils/override_params.py deleted file mode 100644 index 3cc686dc..00000000 --- a/wild_visual_navigation/utils/override_params.py +++ /dev/null @@ -1,11 +0,0 @@ -import dataclasses - - -def override_params(dc, exp): - for k, v in exp.items(): - if hasattr(dc, k): - if dataclasses.is_dataclass(getattr(dc, k)): - setattr(dc, k, override_params(getattr(dc, k), v)) - else: - setattr(dc, k, v) - return dc diff --git a/wild_visual_navigation/utils/testing.py b/wild_visual_navigation/utils/testing.py new file mode 100644 index 00000000..03ef26a1 --- /dev/null +++ b/wild_visual_navigation/utils/testing.py @@ -0,0 +1,34 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# +import os +import cv2 +import torch +from torchvision import transforms as T +from wild_visual_navigation import WVN_ROOT_DIR + + +def load_test_image(): + np_img = cv2.imread(os.path.join(WVN_ROOT_DIR, "assets/images/forest_clean.png")) + img = torch.from_numpy(cv2.cvtColor(np_img, cv2.COLOR_BGR2RGB)) + img = img.permute(2, 0, 1) + img = (img.type(torch.float32) / 255)[None] + return img + + +def get_dino_transform(): + transform = T.Compose( + [ + T.Resize(224, T.InterpolationMode.NEAREST), + T.CenterCrop(224), + ] + ) + return transform + + +def make_results_folder(name): + path = os.path.join(WVN_ROOT_DIR, "results", name) + os.makedirs(path, exist_ok=True) + return path diff --git a/wild_visual_navigation/utils/timing.py b/wild_visual_navigation/utils/timing.py deleted file mode 100644 index 0a8bfd26..00000000 --- a/wild_visual_navigation/utils/timing.py +++ /dev/null @@ -1,204 +0,0 @@ -import torch -import time -import copy -import pickle -import os -from functools import wraps - - -class CpuTimer: - def __init__(self, name="") -> None: - self.name = name - - def __enter__(self): - self.tic() - - def __exit__(self, exc_type, exc_value, exc_tb): - print(f"Time {self.name}: ", self.toc(), "ms") - - def tic(self): - self.start = time.perf_counter() - - def toc(self): - self.end_time = time.perf_counter() - return self.end - self.start - - -class Timer: - def __init__(self, name="") -> None: - self.name = name - self.start = torch.cuda.Event(enable_timing=True) - self.end = torch.cuda.Event(enable_timing=True) - - def __enter__(self): - self.tic() - - def __exit__(self, exc_type, exc_value, exc_tb): - print(f"Time {self.name}: ", self.toc(), "ms") - - def tic(self): - self.start.record() - - def toc(self): - self.end.record() - torch.cuda.synchronize() - return self.start.elapsed_time(self.end) - - -def accumulate_time(method): - @wraps(method) - def timed(*args, **kw): - if not hasattr(args[0], "slt_enabled"): - return method(*args, **kw) - elif not args[0].slt_enabled: - return method(*args, **kw) - - start = torch.cuda.Event(enable_timing=True) - end = torch.cuda.Event(enable_timing=True) - start.record() - result = method(*args, **kw) - end.record() - torch.cuda.synchronize() - - st = start.elapsed_time(end) - if not hasattr(args[0], "slt_time_summary"): - args[0].slt_time_summary = {} - args[0].slt_n_summary = {} - args[0].slt_n_level = {} - args[0].slt_time_squared_summary = {} - - if method.__name__ in args[0].slt_time_summary: - args[0].slt_time_summary[method.__name__] += st - args[0].slt_time_squared_summary[method.__name__] += st**2 - args[0].slt_n_summary[method.__name__] += 1 - else: - args[0].slt_time_summary[method.__name__] = st - args[0].slt_time_squared_summary[method.__name__] = st**2 - args[0].slt_n_summary[method.__name__] = 1 - args[0].slt_n_level[method.__name__] = 0 - - return result - - return timed - - -class SystemLevelContextTimer: - def __init__(self, parent, name="") -> None: - self.parent = parent - if not hasattr(self.parent, "slt_enabled"): - return - elif not self.parent.slt_enabled: - return - - self.name = name - self.start = torch.cuda.Event(enable_timing=True) - self.end = torch.cuda.Event(enable_timing=True) - - def __enter__(self): - if not hasattr(self.parent, "slt_enabled"): - return - elif not self.parent.slt_enabled: - return - - self.tic() - - def __exit__(self, exc_type, exc_value, exc_tb): - if not hasattr(self.parent, "slt_enabled"): - return - elif not self.parent.slt_enabled: - return - - st = self.toc() - - if not 
hasattr(self.parent, "slt_time_summary"): - self.parent.slt_time_summary = {} - self.parent.slt_time_squared_summary = {} - self.parent.slt_n_summary = {} - self.parent.slt_n_level = {} - - if self.name in self.parent.slt_time_summary: - self.parent.slt_time_summary[self.name] += st - self.parent.slt_time_squared_summary[self.name] += st**2 - self.parent.slt_n_summary[self.name] += 1 - else: - self.parent.slt_time_summary[self.name] = st - self.parent.slt_time_squared_summary[self.name] = st**2 - self.parent.slt_n_summary[self.name] = 1 - self.parent.slt_n_level[self.name] = 1 - - def tic(self): - self.start.record() - - def toc(self): - self.end.record() - torch.cuda.synchronize() - return self.start.elapsed_time(self.end) - - -class SystemLevelTimer: - def __init__(self, objects, names, enabled=True) -> None: - self.objects = objects - self.names = names - - for o in self.objects: - o.slt_enabled = enabled - - def __str__(self): - s = "" - for n, o in zip(self.names, self.objects): - if hasattr(o, "slt_time_summary"): - s += n - for (k, v) in o.slt_time_summary.items(): - n = o.slt_n_summary[k] - spacing = int(o.slt_n_level[k] * 5) - mean = v / n - std = round(((o.slt_time_squared_summary[k] / n) - (mean**2)) ** 0.5, 3) - s += ( - "\n +" - + "-" * spacing - + f"- {k}:".ljust(35 - spacing) - + f"{round(v,2)}ms".ljust(18) - + f"counts: {n} ".ljust(18) - + f"std: {std} ".ljust(18) - + f"mean: {round(mean,3)} ".ljust(18) - ) - s += "\n" - return s - - def store(self, folder, key="system_level_timer"): - res = {} - for n, o in zip(self.names, self.objects): - if hasattr(o, "slt_time_summary"): - mean = {k: o.slt_time_summary[k] / o.slt_n_summary[k] for k in o.slt_time_summary.keys()} - std = { - k: (o.slt_time_squared_summary[k] / o.slt_n_summary[k] - mean[k] ** 2) ** 0.5 - for k in o.slt_time_summary.keys() - } - store = [o.slt_n_level, o.slt_time_summary, std, mean] - res[n] = copy.deepcopy(store) - - with open(os.path.join(folder, key + ".pkl"), "wb") as f: - pickle.dump(res, f, protocol=pickle.HIGHEST_PROTOCOL) - - s = self.__str__() - with open(os.path.join(folder, key + ".txt"), "w") as f: - for l in s.split("\n"): - f.write(l + "\n") - - -if __name__ == "__main__": - - print("Start timing using context manager") - - with Timer("Test1"): - s1, s2 = 1000, 1000 - a = torch.zeros((s1, s2)) - for x in range(s1): - for y in range(s2): - a[x, y] = x * y - with Timer("Test2"): - s1, s2 = 1000, 1000 - a = torch.zeros((s1, s2)) - for x in range(s1): - for y in range(s2): - a[y, x] = x * y diff --git a/wild_visual_navigation/visu/__init__.py b/wild_visual_navigation/visu/__init__.py index be43c69e..e6172047 100644 --- a/wild_visual_navigation/visu/__init__.py +++ b/wild_visual_navigation/visu/__init__.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from .paper_colors import ( paper_colors_rgb_u8, paper_colors_rgba_u8, diff --git a/wild_visual_navigation/visu/image_functionality.py b/wild_visual_navigation/visu/image_functionality.py index 77b92211..ef4ba477 100644 --- a/wild_visual_navigation/visu/image_functionality.py +++ b/wild_visual_navigation/visu/image_functionality.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# # TODO: Jonas doc strings import os import imageio @@ -54,7 +59,7 @@ def wrap(*args, **kwargs): ds = cv2.resize(img, dsize=(int(W / 2), int(H / 2)), interpolation=cv2.INTER_CUBIC) try: # logger == neptuneai - from neptune.new.types import File + from neptune.types import File args[0]._pl_model.logger.experiment[tag].log(File.as_image(np.float32(ds) / 255), step=epoch) except Exception: diff --git a/wild_visual_navigation/visu/paper_colors.py b/wild_visual_navigation/visu/paper_colors.py index 44d26463..c5b934e9 100644 --- a/wild_visual_navigation/visu/paper_colors.py +++ b/wild_visual_navigation/visu/paper_colors.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# paper_colors_rgb_u8 = { "orange": (251, 151, 39), "mangenta": (150, 36, 145), diff --git a/wild_visual_navigation/visu/plotting.py b/wild_visual_navigation/visu/plotting.py index 7df721a7..c3559716 100644 --- a/wild_visual_navigation/visu/plotting.py +++ b/wild_visual_navigation/visu/plotting.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from matplotlib.backends.backend_agg import FigureCanvasAgg from PIL import Image import numpy as np diff --git a/wild_visual_navigation/visu/visualizer.py b/wild_visual_navigation/visu/visualizer.py index 80bae295..d67fbc35 100644 --- a/wild_visual_navigation/visu/visualizer.py +++ b/wild_visual_navigation/visu/visualizer.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Jonas Frey, Matias Mattamala. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# # TODO: Jonas doc strings, rework visualiation functions import os @@ -15,11 +20,10 @@ matplotlib.use("Agg") from wild_visual_navigation.visu import image_functionality -from wild_visual_navigation.learning.utils import get_confidence +from wild_visual_navigation.utils import get_confidence from wild_visual_navigation.visu import get_img_from_fig -from wild_visual_navigation.visu import paper_colors_rgb_u8, paper_colors_rgba_u8 -from wild_visual_navigation.visu import paper_colors_rgb_f, paper_colors_rgba_f -from wild_visual_navigation.utils import Timer, accumulate_time +from wild_visual_navigation.visu import paper_colors_rgb_f +from pytictac import Timer, accumulate_time __all__ = ["LearningVisualizer"] @@ -85,7 +89,7 @@ def plot_roc(self, x, y, y_lower=None, y_upper=None, title=None, y_tag=None, **k sns.set_style("darkgrid") fig, ax = plt.subplots(figsize=(3, 3)) - l = len(x) + l = len(x) # noqa: E741 assert len(y) == l assert len(y_lower) == l assert len(y_upper) == l @@ -99,7 +103,13 @@ def plot_roc(self, x, y, y_lower=None, y_upper=None, title=None, y_tag=None, **k if not (_y_lower is None): ax.plot(_x, _y_lower, color=paper_colors_rgb_f[k + "_light"], alpha=0.1) ax.plot(_x, _y_upper, color=paper_colors_rgb_f[k + "_light"], alpha=0.1) - ax.fill_between(_x, _y_lower, _y_upper, color=paper_colors_rgb_f[k + "_light"], alpha=0.2) + ax.fill_between( + _x, + _y_lower, + _y_upper, + color=paper_colors_rgb_f[k + "_light"], + alpha=0.2, + ) ax.plot(np.linspace(0, 1, 100), np.linspace(0, 1, 100), linestyle="--", color="gray") ax.set_xlabel("False positive rate") @@ -221,7 +231,7 @@ def plot_traversability_graph_on_seg( boundary_seg=seg, draw_bound=False, ) - i2 = (torch.from_numpy(i1).type(torch.float32) / 255).permute(2, 0, 1) + i2 = (torch.from_numpy(i1).type(torch.float32) / 255).permute(2, 0, 1) # noqa: F841 # Plot Graph on Image return i1 @@ -239,7 +249,15 @@ def plot_traversability_graph_on_seg( @accumulate_time @image_functionality def plot_traversability_graph( - self, prediction, graph, center, img, max_val=1.0, colormap="RdYlBu", colorize_invalid_centers=False, **kwargs + self, + prediction, + graph, + center, + img, + max_val=1.0, + colormap="RdYlBu", + colorize_invalid_centers=False, + **kwargs, ): """Plot prediction on graph @@ -355,15 +373,35 @@ def plot_detectron_classification( overlay_mask=None, **kwargs, ): - cmap = cm.get_cmap("RdYlBu", 256) - cmap = np.concatenate([cmap(np.linspace(0, 0.3, 128)), cmap(np.linspace(0.7, 1.0, 128))]) - cmap = torch.from_numpy(cmap).to(seg)[:, :3] + if kwargs.get("cmap", None): + cmap = kwargs["cmap"] + else: + s = 0.3 # If bigger, get more fine-grained green, if smaller get more fine-grained red + cmap = cm.get_cmap("RdYlBu", 256) # or RdYlGn + cmap = np.concatenate( + [cmap(np.linspace(0, s, 128)), cmap(np.linspace(1 - s, 1.0, 128))] + ) # Stretch the colormap + cmap = torch.from_numpy(cmap).to(seg)[:, :3] img = self.plot_image(img, not_log=True) seg_img = self.plot_segmentation( - (seg * 255).type(torch.long).clip(0, 255), max_seg=256, colormap=cmap, store=False, not_log=True + (seg * 255).type(torch.long).clip(0, 255), + max_seg=256, + colormap=cmap, + store=False, + not_log=True, ) + # plt.hist(seg_img.ravel(), bins=500) + # # Get current ros time + # now = rospy.Time.now() + # # Create a unique filename + # filename = f"{now.secs}_{now.nsecs}.png" + # # Save the figure + # plt.savefig(f"/home/rschmid/overlays/{filename}") + # # Close the figure + # plt.close() + H, W = img.shape[:2] back = np.zeros((H, W, 4)) back[:, :, :3] = img @@ 
-527,14 +565,19 @@ def plot_optical_flow(self, flow: torch.Tensor, img1: torch.Tensor, img2: torch. dv = (flow[1, u, v] + i2.shape[1]).item() try: draw.line([(v, u), (v + dv, u + du)], fill=col, width=2) - except: + except Exception: pass return np.array(pil_img).astype(np.uint8) @accumulate_time @image_functionality def plot_sparse_optical_flow( - self, pre_pos: torch.Tensor, cur_pos: torch.Tensor, img1: torch.Tensor, img2: torch.Tensor, **kwargs + self, + pre_pos: torch.Tensor, + cur_pos: torch.Tensor, + img1: torch.Tensor, + img2: torch.Tensor, + **kwargs, ): """Draws line connection between to images based on estimated flow @@ -557,8 +600,15 @@ def plot_sparse_optical_flow( col = (0, 255, 0) for p, c in zip(pre_pos, cur_pos): try: - draw.line([(p[0].item(), p[1].item()), ((i2.shape[1] + c[0]).item(), c[1].item())], fill=col, width=2) - except: + draw.line( + [ + (p[0].item(), p[1].item()), + ((i2.shape[1] + c[0]).item(), c[1].item()), + ], + fill=col, + width=2, + ) + except Exception: pass return np.array(pil_img).astype(np.uint8) @@ -685,7 +735,13 @@ def plot_sparse_optical_flow( with Timer("plot_detectron_quick"): for i in range(N): visu.plot_detectron( - img=img, seg=seg, store=store, max_seg=ele, tag="5_quick", not_log=not_log, draw_bound=False + img=img, + seg=seg, + store=store, + max_seg=ele, + tag="5_quick", + not_log=not_log, + draw_bound=False, ) print("Start seg") diff --git a/wild_visual_navigation_anymal/CMakeLists.txt b/wild_visual_navigation_anymal/CMakeLists.txt index 8af4e78e..c0be04f0 100644 --- a/wild_visual_navigation_anymal/CMakeLists.txt +++ b/wild_visual_navigation_anymal/CMakeLists.txt @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# cmake_minimum_required(VERSION 3.0.2) project(wild_visual_navigation_anymal) set (CMAKE_CXX_STANDARD 14) @@ -5,7 +10,6 @@ set (CMAKE_CXX_STANDARD 14) set(CATKIN_PACKAGE_LIST rospy roscpp - anymal_msgs sensor_msgs std_msgs wild_visual_navigation_msgs @@ -29,13 +33,16 @@ include_directories( ${catkin_INCLUDE_DIRS} ) -# Declare node -add_executable(anymal_msg_converter_cpp_node - src/anymal_msg_converter_cpp_node.cpp) +if(BUILD_ANYMAL) + find_package( anymal_msgs ) + # Declare node + add_executable( anymal_msg_converter_cpp_node + src/anymal_msg_converter_cpp_node.cpp) -target_link_libraries(anymal_msg_converter_cpp_node + target_link_libraries(anymal_msg_converter_cpp_node ${catkin_LIBRARIES} -) + ) +endif() ############# ## Install ## diff --git a/wild_visual_navigation_anymal/README.md b/wild_visual_navigation_anymal/README.md new file mode 100644 index 00000000..292b50b4 --- /dev/null +++ b/wild_visual_navigation_anymal/README.md @@ -0,0 +1,15 @@ +Setup on ANYmal: + +NPC: +``` +sudo apt-get install ros-noetic-anymal-msgs-dev +catkin build wild_visual_navigation_anymal + +catkin build wild_visual_navigation_anymal --cmake-args -DBUILD_ANYMAL=1 +``` + +Jetson: +``` +``` + +Currently the resizing is not working which is pretty bad for the wide angle camera. 
\ No newline at end of file diff --git a/wild_visual_navigation_ros/config/elevation_mapping_cupy/anymal_parameters.yaml b/wild_visual_navigation_anymal/config/elevation_mapping_cupy/anymal_parameters.yaml similarity index 89% rename from wild_visual_navigation_ros/config/elevation_mapping_cupy/anymal_parameters.yaml rename to wild_visual_navigation_anymal/config/elevation_mapping_cupy/anymal_parameters.yaml index b275acf0..73959ac4 100644 --- a/wild_visual_navigation_ros/config/elevation_mapping_cupy/anymal_parameters.yaml +++ b/wild_visual_navigation_anymal/config/elevation_mapping_cupy/anymal_parameters.yaml @@ -55,42 +55,53 @@ enable_normal_color: false # If true, the map contains 'col #### Traversability filter ######## use_chainer: false # Use chainer as a backend of traversability filter or pytorch. If false, it uses pytorch. pytorch requires ~2GB more GPU memory compared to chainer but runs faster. -weight_file: '$(rospack find elevation_mapping_cupy)/config/weights.dat' # Weight file for traversability filter +weight_file: '$(rospack find elevation_mapping_cupy)/config/core/weights.dat' # Weight file for traversability filter #### Upper bound ######## use_only_above_for_upper_bound: false +#### Initializer ######## +initialize_method: 'linear' # Choose one from 'nearest', 'linear', 'cubic' +initialize_frame_id: ['RF_FOOT', 'LF_FOOT', 'RH_FOOT', 'LH_FOOT'] # One tf (like ['footprint'] ) initializes a square around it. +initialize_tf_offset: [0.0, 0.0, 0.0, 0.0] # z direction. Should be same number as initialize_frame_id. +dilation_size_initialize: 5 # dilation size after the init. +initialize_tf_grid_size: 0.4 # This is not used if number of tf is more than 3. +use_initializer_at_start: true # Use initializer when the node starts. + #### Plugins ######## -plugin_config_file: '$(rospack find wild_visual_navigation_ros)/config/elevation_mapping_cupy/anymal_plugin_config.yaml' +plugin_config_file: '$(rospack find wild_visual_navigation_anymal)/config/elevation_mapping_cupy/anymal_plugin_config.yaml' + +pointcloud_channel_fusions: + rgb: 'color' + default: 'average' + +image_channel_fusions: + rgb: 'color' + default: 'exponential' + feat_.*: 'exponential' + sem_.*: 'exponential' + #### Publishers ######## # topic_name: # layers: # Choose from 'elevation', 'variance', 'traversability', 'time', 'normal_x', 'normal_y', 'normal_z', 'color', plugin_layer_names # basic_layers: # basic_layers for valid cell computation (e.g. Rviz): Choose a subset of `layers`. # fps: # Publish rate. Use smaller value than `map_acquire_fps`. 
+ publishers: elevation_map_raw: - layers: ['elevation', 'traversability', 'variance'] + layers: ['elevation', 'traversability', 'variance', 'rgb'] basic_layers: ['elevation', 'traversability'] fps: 5.0 - semantic_map_raw: - layers: ['elevation', 'traversability', 'visual_traversability', 'elevation_with_semantic'] + semantic_map: + layers: ['elevation', 'traversability', 'visual_traversability', 'visual_confidence', 'elevation_with_semantics', 'rgb'] basic_layers: ['elevation', 'traversability'] fps: 5.0 - # elevation_map_recordable: - # layers: ['elevation', 'traversability'] - # basic_layers: ['elevation', 'traversability'] - # fps: 2.0 - # elevation_map_filter: - # layers: ['min_filter', 'smooth', 'inpaint', 'upper_bound','rgb'] - # basic_layers: ['min_filter'] - # fps: 3.0 -#### Initializer ######## -initialize_method: 'linear' # Choose one from 'nearest', 'linear', 'cubic' -initialize_frame_id: ['RF_FOOT', 'LF_FOOT', 'RH_FOOT', 'LH_FOOT'] # One tf (like ['footprint'] ) initializes a square around it. -initialize_tf_offset: [0.0, 0.0, 0.0, 0.0] # z direction. Should be same number as initialize_frame_id. -dilation_size_initialize: 5 # dilation size after the init. -initialize_tf_grid_size: 0.4 # This is not used if number of tf is more than 3. -use_initializer_at_start: true # Use initializer when the node starts. + local_planning_map: + layers: ['elevation', 'traversability', 'visual_traversability'] + basic_layers: ['elevation', 'traversability'] + fps: 5.0 + + \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/elevation_mapping_cupy/anymal_plugin_config.yaml b/wild_visual_navigation_anymal/config/elevation_mapping_cupy/anymal_plugin_config.yaml similarity index 91% rename from wild_visual_navigation_ros/config/elevation_mapping_cupy/anymal_plugin_config.yaml rename to wild_visual_navigation_anymal/config/elevation_mapping_cupy/anymal_plugin_config.yaml index 20ef3f98..9e41a17c 100644 --- a/wild_visual_navigation_ros/config/elevation_mapping_cupy/anymal_plugin_config.yaml +++ b/wild_visual_navigation_anymal/config/elevation_mapping_cupy/anymal_plugin_config.yaml @@ -29,10 +29,10 @@ inpainting: method: "telea" # telea or ns # Apply inpainting using opencv -elevation_semantic: +mask_elevation_semantics: enable: True fill_nan: False is_height_layer: True - layer_name: "elevation_with_semantic" + layer_name: "elevation_with_semantics" extra_params: - method: "telea" \ No newline at end of file + semantic_reference_layer_name: "visual_traversability" \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/elevation_mapping_cupy/anymal_sensor_parameter.yaml b/wild_visual_navigation_anymal/config/elevation_mapping_cupy/anymal_sensor_parameter.yaml new file mode 100644 index 00000000..bad469c6 --- /dev/null +++ b/wild_visual_navigation_anymal/config/elevation_mapping_cupy/anymal_sensor_parameter.yaml @@ -0,0 +1,60 @@ +#### Subscribers ######## +subscribers: + front_upper_depth: + topic_name: /depth_camera_front_upper/point_cloud_self_filtered + data_type: pointcloud + + front_lower_depth: + topic_name: /depth_camera_front_lower/point_cloud_self_filtered + data_type: pointcloud + + rear_upper_depth: + topic_name: /depth_camera_rear_upper/point_cloud_self_filtered + data_type: pointcloud + + rear_lower_depth: + topic_name: /depth_camera_rear_lower/point_cloud_self_filtered + data_type: pointcloud + + left_depth: + topic_name: /depth_camera_left/point_cloud_self_filtered + data_type: pointcloud + + right_depth: + topic_name: 
/depth_camera_right/point_cloud_self_filtered + data_type: pointcloud + + velodyne: + topic_name: /point_cloud_filter/lidar/point_cloud_filtered + data_type: pointcloud + + # Semantics + front_wide_angle: + topic_name: /wide_angle_camera_front/image_color_rect + camera_info_topic_name: /wide_angle_camera_front/camera_info + data_type: image + + rear_wide_angle: + topic_name: /wide_angle_camera_rear/image_color_rect + camera_info_topic_name: /wide_angle_camera_rear/camera_info + data_type: image + + # Traversability + # channels: ["visual_traversability"] + sem_wvn_front_traversability: + topic_name: '/wild_visual_navigation_node/front/traversability' + camera_info_topic_name: '/wild_visual_navigation_node/front/camera_info' + channels: ['visual_traversability'] + data_type: image + + sem_wvn_rear_traversability: + topic_name: '/wild_visual_navigation_node/rear/traversability' + camera_info_topic_name: '/wild_visual_navigation_node/rear/camera_info' + channels: ['visual_traversability'] + data_type: image + + # sem_wvn_front_confidence: + # topic_name: '/wild_visual_navigation_node/front/confidence' + # camera_info_topic_name: '/wild_visual_navigation_node/front/camera_info' + # channels: ['visual_traversability'] + # data_type: image \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/field_local_planner/visual/filter_chain.yaml b/wild_visual_navigation_anymal/config/field_local_planner/geometric/filter_chain.yaml similarity index 53% rename from wild_visual_navigation_ros/config/field_local_planner/visual/filter_chain.yaml rename to wild_visual_navigation_anymal/config/field_local_planner/geometric/filter_chain.yaml index a0dbfb42..4c21672c 100644 --- a/wild_visual_navigation_ros/config/field_local_planner/visual/filter_chain.yaml +++ b/wild_visual_navigation_anymal/config/field_local_planner/geometric/filter_chain.yaml @@ -11,18 +11,14 @@ grid_map_filters: output_layer: unknown_space # Inpainting - - name: denoise_inpaint - type: gridMapFiltersDrs/DenoiseAndInpaintFilter + - name: inpaint + type: gridMapFiltersDrs/InpaintFilter params: input_layer: elevation output_layer: elevation_inpainted radius: 0.1 # in m inpainting_type: telea # 'ns' or 'telea' allowed - pre_denoising: false # enables denoising before inpainting - denoising_radius: 0.5 # m - denoising_type: non_local # 'total_variation', 'non_local', 'gaussian', and 'median' supported - total_variation_lambda: 1.0 - + # Compute surface normals (fast) - name: surface_normals type: gridMapFiltersDrs/FastNormalsVectorFilter @@ -56,55 +52,61 @@ grid_map_filters: - name: traversability type: gridMapFilters/MathExpressionFilter params: - output_layer: geometric_traversability + output_layer: raw_traversability # expression: (0.5 * (1.0 - (slope / 0.6)) + 0.5 * height_traversability) .* (1.0 - unknown_space) expression: (0.5 * (1.0 - (slope / 0.6)) + 0.5 * height_traversability) - - - name: footprint_mask - type: gridMapFiltersDrs/FootprintMaskFilter - params: - output_layer: footprint_mask - footprint_frame: base - type: rectangle - length: 1.0 - width: 0.5 - clearance: 0.1 - - # Inpaint visual traversability - - name: inpaint_visual_traversability - type: gridMapFiltersDrs/DenoiseAndInpaintFilter + + # Denoising + - name: smooth_traversability + type: gridMapFiltersDrs/DenoiseFilter params: - input_layer: visual_traversability - output_layer: visual_traversability_inpainted - radius: 0.1 # in m - inpainting_type: telea # 'ns' or 'telea' allowed - pre_denoising: true # enables denoising before inpainting - 
denoising_radius: 0.12 # m - denoising_type: median # 'total_variation', 'non_local', 'gaussian', and 'median' supported - total_variation_lambda: 1.0 + input_layer: raw_traversability + output_layer: traversability_smoothed + radius: 0.5 # m + type: gaussian # 'total_variation', 'non_local', 'gaussian', and 'median' supported - - name: visual_traversability_footprint_fix + # Denoising + - name: rescale_traversability type: gridMapFilters/MathExpressionFilter params: - output_layer: visual_traversability_inpainted - expression: (1.0 - footprint_mask) .* visual_traversability_inpainted + footprint_mask + output_layer: traversability + #expression: (traversability_smoothed - minOfFinites(traversability_smoothed)) / maxOfFinites(traversability_smoothed) - minOfFinites(traversability_smoothed)) .* (maxOfFinites(traversability_smoothed) - minOfFinites(traversability_smoothed)) + minOfFinites(traversability_smoothed) + expression: traversability_smoothed + + # - name: inflate_traversability + # type: gridMapFilters/SlidingWindowMathExpressionFilter + # params: + # input_layer: traversability + # output_layer: traversability + # expression: minOfFinites(traversability) #minOfFinites(traversability) + # compute_empty_cells: false + # edge_handling: empty # options: inside, crop, empty, mean + # # window_size: 5 # in number of cells (optional, default: 3), make sure to make this compatible with the kernel matrix + # window_length: 0.4 # instead of window_size, in m # Compute 2D Signed Distance Field - name: sdf type: gridMapFiltersDrs/SignedDistanceField2dFilter params: - input_layer: visual_traversability_inpainted + input_layer: traversability output_layer: sdf normalize_gradients: true threshold: 0.1 + # Compute cost using traversability + - name: traversability_to_cost + type: gridMapFilters/MathExpressionFilter + params: + output_layer: cost + expression: 10*(1.0 - traversability) + 1.0 + - name: geodesic - type: gridMapFiltersDrs/GeodesicDistanceField2dFilter + # type: gridMapFiltersDrs/GeodesicDistanceField2dFilter + type: gridMapFiltersDrs/GeodesicFieldFilter params: - input_layer: visual_traversability_inpainted + input_layer: cost output_layer: geodesic normalize_gradients: true attractor_topic: /field_local_planner/current_goal - threshold: 0.1 # to be applied to the input layer - use_field_smoothing: false - field_smoothing_radius: 0.1 # m \ No newline at end of file + publish_path: true + path_topic: /geodesic_distance_filter/path \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/field_local_planner/geometric/filter_chain_wifi.yaml b/wild_visual_navigation_anymal/config/field_local_planner/geometric/filter_chain_wifi.yaml new file mode 100644 index 00000000..97367dcb --- /dev/null +++ b/wild_visual_navigation_anymal/config/field_local_planner/geometric/filter_chain_wifi.yaml @@ -0,0 +1,6 @@ +grid_map_filters: + + - name: delete + type: gridMapFiltersDrs/DeleteAllButFilter + params: + layers: [elevation, elevation_inpainted, traversability, cost, geodesic, sdf] # List of layers. 
\ No newline at end of file diff --git a/wild_visual_navigation_ros/config/field_local_planner/rmp.yaml b/wild_visual_navigation_anymal/config/field_local_planner/rmp.yaml similarity index 90% rename from wild_visual_navigation_ros/config/field_local_planner/rmp.yaml rename to wild_visual_navigation_anymal/config/field_local_planner/rmp.yaml index 8045a444..003bf63c 100644 --- a/wild_visual_navigation_ros/config/field_local_planner/rmp.yaml +++ b/wild_visual_navigation_anymal/config/field_local_planner/rmp.yaml @@ -53,7 +53,8 @@ rmp: - geodesic_heading - goal_position - goal_orientation - - damping + # - damping + - regularization - id: front_left point_factor: [0.6, 0.3] @@ -61,6 +62,7 @@ rmp: radius: 0.3 affected_by: - sdf_obstacle + - sdf_obstacle_damping - id: front_right point_factor: [0.6, -0.3] @@ -68,6 +70,7 @@ rmp: radius: 0.3 affected_by: - sdf_obstacle + - sdf_obstacle_damping - id: back_left point_factor: [-0.6, 0.3] @@ -75,6 +78,7 @@ rmp: radius: 0.3 affected_by: - sdf_obstacle + - sdf_obstacle_damping - id: back_right point_factor: [-0.6, -0.3] @@ -82,6 +86,7 @@ rmp: radius: 0.3 affected_by: - sdf_obstacle + - sdf_obstacle_damping # This defines the parameters for the RMPs policies: @@ -141,6 +146,14 @@ rmp: type: projector_invlogistic offset: 1.0 steepness: 1.0 + sdf_obstacle_damping: + weight: 1.0 # 1.0 + gain: 0.1 + color: [1.0, 0.0, 0.0] # red + metric: + type: projector_invlogistic + offset: 1.0 + steepness: 1.0 regularization: weight: 0.0 # 1.0 gain: 0.001 @@ -148,4 +161,4 @@ rmp: metric: type: constant offset: 0.0 - steepness: 1.0 + steepness: 1.0 \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/field_local_planner/geometric/filter_chain.yaml b/wild_visual_navigation_anymal/config/field_local_planner/visual/filter_chain.yaml similarity index 70% rename from wild_visual_navigation_ros/config/field_local_planner/geometric/filter_chain.yaml rename to wild_visual_navigation_anymal/config/field_local_planner/visual/filter_chain.yaml index 118e8a27..0284fe5c 100644 --- a/wild_visual_navigation_ros/config/field_local_planner/geometric/filter_chain.yaml +++ b/wild_visual_navigation_anymal/config/field_local_planner/visual/filter_chain.yaml @@ -11,17 +11,13 @@ grid_map_filters: output_layer: unknown_space # Inpainting - - name: denoise_inpaint - type: gridMapFiltersDrs/DenoiseAndInpaintFilter + - name: inpaint + type: gridMapFiltersDrs/InpaintFilter params: input_layer: elevation output_layer: elevation_inpainted radius: 0.1 # in m inpainting_type: telea # 'ns' or 'telea' allowed - pre_denoising: false # enables denoising before inpainting - denoising_radius: 0.5 # m - denoising_type: non_local # 'total_variation', 'non_local', 'gaussian', and 'median' supported - total_variation_lambda: 1.0 # Compute surface normals (fast) - name: surface_normals @@ -59,23 +55,40 @@ grid_map_filters: output_layer: geometric_traversability # expression: (0.5 * (1.0 - (slope / 0.6)) + 0.5 * height_traversability) .* (1.0 - unknown_space) expression: (0.5 * (1.0 - (slope / 0.6)) + 0.5 * height_traversability) + + # Inpaint visual traversability + - name: inpaint_visual_traversability + type: gridMapFiltersDrs/DenoiseFilter + params: + input_layer: visual_traversability + output_layer: visual_traversability_inpainted + radius: 0.12 # m + type: median # 'total_variation', 'non_local', 'gaussian', and 'median' supported + total_variation_lambda: 1.0 # Compute 2D Signed Distance Field - name: sdf type: gridMapFiltersDrs/SignedDistanceField2dFilter params: - input_layer: 
geometric_traversability + input_layer: visual_traversability_inpainted output_layer: sdf normalize_gradients: true threshold: 0.1 + # Compute cost using traversability + - name: traversability_to_cost + type: gridMapFilters/MathExpressionFilter + params: + output_layer: cost + expression: 10*(1.0 - visual_traversability_inpainted) + 1.0 + - name: geodesic - type: gridMapFiltersDrs/GeodesicDistanceField2dFilter + # type: gridMapFiltersDrs/GeodesicDistanceField2dFilter + type: gridMapFiltersDrs/GeodesicFieldFilter params: - input_layer: geometric_traversability + input_layer: cost output_layer: geodesic normalize_gradients: true attractor_topic: /field_local_planner/current_goal - threshold: 0.1 # to be applied to the input layer - use_field_smoothing: false - field_smoothing_radius: 0.1 # m \ No newline at end of file + publish_path: true + path_topic: /geodesic_distance_filter/path \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/field_local_planner/geometric/filter_chain_wifi.yaml b/wild_visual_navigation_anymal/config/field_local_planner/visual/filter_chain_wifi.yaml similarity index 66% rename from wild_visual_navigation_ros/config/field_local_planner/geometric/filter_chain_wifi.yaml rename to wild_visual_navigation_anymal/config/field_local_planner/visual/filter_chain_wifi.yaml index ef49d560..ea53314a 100644 --- a/wild_visual_navigation_ros/config/field_local_planner/geometric/filter_chain_wifi.yaml +++ b/wild_visual_navigation_anymal/config/field_local_planner/visual/filter_chain_wifi.yaml @@ -3,4 +3,4 @@ grid_map_filters: - name: delete type: gridMapFiltersDrs/DeleteAllButFilter params: - layers: [elevation, elevation_inpainted, geometric_traversability, geodesic, sdf] # List of layers. \ No newline at end of file + layers: [elevation, elevation_inpainted, geometric_traversability, visual_traversability_inpainted, cost, geodesic, sdf] # List of layers. 
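Both filter chains above now derive an explicit cost layer with a MathExpressionFilter (`cost = 10 * (1.0 - traversability) + 1.0`, using `visual_traversability_inpainted` in the visual chain) and hand that layer to `GeodesicFieldFilter`, instead of thresholding the traversability layer inside the old `GeodesicDistanceField2dFilter`. A minimal sketch of that cost mapping, assuming a NumPy array as a stand-in for the grid_map layer (the grid values below are invented for illustration):

```python
import numpy as np

# Stand-in for a grid_map traversability layer; 1.0 = fully traversable, 0.0 = blocked.
traversability = np.array([[1.0, 0.8],
                           [0.3, 0.0]])

# traversability_to_cost (MathExpressionFilter): cost = 10 * (1 - traversability) + 1
cost = 10.0 * (1.0 - traversability) + 1.0

print(cost)
# [[ 1.  3.]
#  [ 8. 11.]]   -> cost stays in [1, 11], never zero
```

Keeping the minimum cost at 1 rather than 0 presumably keeps the geodesic field growing monotonically with path length, so fully traversable regions still accumulate distance instead of acting as free shortcuts toward the attractor goal.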
\ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/procman/record_rosbags.pmd b/wild_visual_navigation_anymal/config/procman/record_rosbags.pmd new file mode 100644 index 00000000..3dc5ab4e --- /dev/null +++ b/wild_visual_navigation_anymal/config/procman/record_rosbags.pmd @@ -0,0 +1,61 @@ +group "0 - Setup" { + cmd "0.0 - anymal_rsl_replay" { + exec = "rosrun anymal_rsl_launch replay.py d /Data/2024_02_06_Dodo_MPI_WVN/dodo_mission_2024_02_06/2024-02-06-14-39-01_Small_MPI_Indoor/2024-02-06-14-39-01_lpc.yaml"; + host = "localhost"; + } + cmd "0.1 - anymal_msg_converter" { + exec = "rosrun wild_visual_navigation_anymal anymal_msg_converter_node.py"; + host = "localhost"; + } + cmd "0.2 - resize_images_wide_angle_dual" { + exec = "roslaunch wild_visual_navigation_anymal resize_images_wide_angle_dual.launch"; + host = "localhost"; + } + cmd "0.3 - resize_hdr" { + exec = "roslaunch wild_visual_navigation_anymal resize_hdr.launch"; + host = "localhost"; + } + cmd "0.4 - uncompress_front" { + exec = "rosrun image_transport republish compressed in:=/wide_angle_camera_front/image_color_rect raw out:=/wide_angle_camera_front/image_color_rect"; + host = "localhost"; + } + cmd "0.5 - uncompress_rear" { + exec = "rosrun image_transport republish compressed in:=/wide_angle_camera_rear/image_color_rect raw out:=/wide_angle_camera_rear/image_color_rect"; + host = "localhost"; + } + cmd "0.6 - uncompress_hdr" { + exec = "rosrun image_transport republish compressed in:=/hdr_camera/image_raw raw out:=/hdr_camera/image_raw"; + host = "localhost"; + } + cmd "0.7 - record_coordinate" { + exec = "rosrun anymal_rsl_recording rosbag_record_coordinator.py"; + host = "localhost"; + } + cmd "0.8 - record_node" { + exec = "rosrun anymal_rsl_recording rosbag_record_node.py __name:=rosbag_record_node_ge76"; + host = "localhost"; + } + cmd "0.9 - rviz" { + exec = "roslaunch wild_visual_navigation_anymal view.launch"; + host = "localhost"; + } +} + + +group "1 - Recording" { + cmd "1.1 - start recording" { + exec = "rosservice call /rosbag_record_robot_coordinator/start_recording \"yaml_file: '/home/jonfrey/workspaces/catkin_ws/src/wild_visual_navigation/wild_visual_navigation_anymal/config/recording/create_open_source_rosbags.yaml'\""; + host = "localhost"; + } + cmd "1.2 - stop recording" { + exec = "rosservice call /rosbag_record_robot_coordinator/stop_recording \"verbose: false\""; + host = "localhost"; + } +} + +group "2 - bag play" { + cmd "2.0 - rosbag play" { + exec = "rosbag_play --sem --tf --flp --wvn --sem /Data/2024_02_06_Dodo_MPI_WVN/dodo_mission_2024_02_06/2024-02-06-14-39-01_Small_MPI_Indoor/*.bag -r 0.5 -l "; + host = "localhost"; + } +} \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/procman/replay.pmd b/wild_visual_navigation_anymal/config/procman/replay.pmd new file mode 100644 index 00000000..a9f4a7c2 --- /dev/null +++ b/wild_visual_navigation_anymal/config/procman/replay.pmd @@ -0,0 +1,99 @@ +group "0 - Setup" { + cmd "0.0 - anymal_rsl_replay" { + exec = "rosrun anymal_rsl_launch replay.py d /Data/2024_02_06_Dodo_MPI_WVN/dodo_mission_2024_02_06/2024-02-06-14-39-01_Small_MPI_Indoor/2024-02-06-14-39-01_lpc.yaml"; + host = "localhost"; + } + cmd "0.1 - anymal_msg_converter" { + exec = "rosrun wild_visual_navigation_anymal anymal_msg_converter_node.py"; + host = "localhost"; + } + cmd "0.2 - rviz anymal" { + exec = "roslaunch wild_visual_navigation_anymal view.launch"; + host = "localhost"; + } + cmd "0.3 - resize_images_wide_angle_front" { + exec = 
"roslaunch wild_visual_navigation_anymal resize_images_wide_angle_front.launch"; + host = "localhost"; + } + cmd "0.4 - resize_images_wide_angle_rear" { + exec = "roslaunch wild_visual_navigation_anymal resize_images_wide_angle_rear.launch"; + host = "localhost"; + } + cmd "0.5 - rqt learning graph" { + exec = "rosrun rqt_multiplot rqt_multiplot"; + host = "localhost"; + } + cmd "0.6 - rosbag play" { + exec = "rosbag_play --sem --tf --flp --wvn --sem /Data/2024_02_06_Dodo_MPI_WVN/dodo_mission_2024_02_06/2024-02-06-14-39-01_Small_MPI_Indoor/*.bag -r 0.5 -l "; + host = "localhost"; + } +} + +group "1 - WVN" { + cmd "1.1 - wild_visual_navigation - resize - elevation mapping - overlay" { + exec = "roslaunch wild_visual_navigation_anymal robot.launch camera:=wide_angle_dual_resize"; + host = "localhost"; + } + + cmd "1.2 - overlay" { + exec = "roslaunch wild_visual_navigation_anymal overlay_images.launch"; + host = "localhost"; + } + cmd "1.3 - elevation_mapping_cupy" { + exec = "roslaunch wild_visual_navigation_anymal elevation_mapping_cupy.launch"; + host = "localhost"; + } + cmd "1.4 - wild_visual_navigation_learning" { + exec = "python3 /home/jonfrey/workspaces/catkin_ws/src/wild_visual_navigation/wild_visual_navigation_ros/scripts/wvn_learning_node.py"; + host = "localhost"; + } + cmd "1.5 - wild_visual_navigation_feature_extractor" { + exec = "python3 /home/jonfrey/workspaces/catkin_ws/src/wild_visual_navigation/wild_visual_navigation_ros/scripts/wvn_feature_extractor_node.py"; + host = "localhost"; + } + cmd "1.6 - kill_wvn" { + exec = "rosnode kill /wvn_learning_node /wvn_feature_extractor_node"; + host = "localhost"; + } +} + +group "3 - Config" { + cmd "3.1 - dynamic_reconfigure" { + exec = "rosrun rqt_reconfigure rqt_reconfigure"; + host = "localhost"; + } +} + +group "4 - Uncompress" { + cmd "4.1 - front" { + exec = "rosrun image_transport republish compressed in:=/wide_angle_camera_front/image_color_rect raw out:=/wide_angle_camera_front/image_color_rect"; + host = "localhost"; + } + cmd "4.2 - rear" { + exec = "rosrun image_transport republish compressed in:=/wide_angle_camera_rear/image_color_rect raw out:=/wide_angle_camera_rear/image_color_rect"; + host = "localhost"; + } + cmd "4.3 - hdr" { + exec = "rosrun image_transport republish compressed in:=/hdr_camera/image_raw raw out:=/hdr_camera/image_raw"; + host = "localhost"; + } +} + +group "5 - Interface" { + cmd "5.1 - pause_training" { + exec = "rosservice call /wild_visual_navigation_node/pause_learning 'data: true'"; + host = "localhost"; + } + cmd "5.2 - resume_training" { + exec = "rosservice call /wild_visual_navigation_node/pause_learning 'data: false'"; + host = "localhost"; + } + cmd "5.3 - save_checkpoint" { + exec = "rosservice call /wild_visual_navigation_node/save_checkpoint ''"; + host = "localhost"; + } + cmd "5.4 - load_checkpoint" { + exec = "rosservice call /wild_visual_navigation_node/load_checkpoint 'path: 'absolute_path_in_robot_filesystem'"; + host = "localhost"; + } +} \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/procman/robot.pmd b/wild_visual_navigation_anymal/config/procman/robot.pmd similarity index 78% rename from wild_visual_navigation_ros/config/procman/robot.pmd rename to wild_visual_navigation_anymal/config/procman/robot.pmd index 05e40b96..749eadba 100644 --- a/wild_visual_navigation_ros/config/procman/robot.pmd +++ b/wild_visual_navigation_anymal/config/procman/robot.pmd @@ -33,10 +33,17 @@ group "0.npc" { } } -group "1.jetson" { +group "1.xavier" { + cmd 
"elevation_mapping" { + exec = "roslaunch wild_visual_navigation_ros elevation_mapping_cupy.launch"; + host = "anymal_coyote_xavier"; + } +} + +group "2.orin" { cmd "wild_visual_navigation" { exec = "roslaunch wild_visual_navigation_ros robot.launch"; - host = "apc-digiforest"; + host = "anymal_coyote_orin"; } cmd "kill_wvn" { @@ -45,33 +52,33 @@ group "1.jetson" { } } -group "2.visualization" { +group "3.visualization" { cmd "rviz" { exec = "roslaunch wild_visual_navigation_ros view.launch"; host = "localhost"; } } -group "3.recording" { - cmd "3.1.rosbag_record" { +group "4.recording" { + cmd "4.1.rosbag_record" { exec = "rosservice call /rosbag_record_robot_coordinator/record_bag {}"; host = "localhost"; } - cmd "3.2.rosbag_stop" { + cmd "4.2.rosbag_stop" { exec = "rosservice call /rosbag_record_robot_coordinator/stop_bag {}"; host = "localhost"; } - cmd "3.3.fetch_bags" { + cmd "4.3.fetch_bags" { exec = "/home/ori/git/anymal_rsl/anymal_rsl/anymal_rsl_utils/anymal_rsl_recording/anymal_rsl_recording/bin/copy_mission_data_from_robot.sh digiforest coyote /home/ori/logs"; host = "localhost"; } - cmd "3.4.delete_bags" { + cmd "4.4.delete_bags" { exec = "/home/ori/git/anymal_rsl/anymal_rsl/anymal_rsl_utils/anymal_rsl_recording/anymal_rsl_recording/bin/remove_mission_data_from_robot.sh digiforest coyote"; host = "localhost"; } } -group "4.monitoring" { +group "5.monitoring" { cmd "lpc_disk" { exec = "rostopic echo /disk_monitor_lpc/status/disks[1]"; host = "localhost"; @@ -90,13 +97,28 @@ group "4.monitoring" { } } -group "5.configuration" { - cmd "5.1.dynamic_reconfigure" { +group "6.configuration" { + cmd "6.1.dynamic_reconfigure" { exec = "rosrun rqt_reconfigure rqt_reconfigure"; host = "localhost"; } } +group "7.white_balance" { + cmd "7.1.white_balance_front_reset" { + exec = "rosservice call /alphasense_raw_image_pipeline_front/reset_white_balance"; + host = "localhost"; + } + cmd "7.2.white_balance_left_reset" { + exec = "rosservice call /alphasense_raw_image_pipeline_left/reset_white_balance"; + host = "localhost"; + } + cmd "7.3.white_balance_right_reset" { + exec = "rosservice call /alphasense_raw_image_pipeline_right/reset_white_balance"; + host = "localhost"; + } +} + group "x.learning_utils" { cmd "x.1.pause_training" { exec = "rosservice call /wild_visual_navigation_node/pause_learning 'data: true'"; @@ -108,7 +130,7 @@ group "x.learning_utils" { } cmd "x.3.save_checkpoint" { exec = "rosservice call /wild_visual_navigation_node/save_checkpoint ''"; - host = "apc-digiforest"; + host = "anymal_coyote_orin"; } cmd "x.4.load_checkpoint" { exec = "rosservice call /wild_visual_navigation_node/load_checkpoint 'path: 'absolute_path_in_robot_filesystem'"; diff --git a/wild_visual_navigation_anymal/config/recording/create_open_source_rosbags.yaml b/wild_visual_navigation_anymal/config/recording/create_open_source_rosbags.yaml new file mode 100644 index 00000000..4fdb5b2a --- /dev/null +++ b/wild_visual_navigation_anymal/config/recording/create_open_source_rosbags.yaml @@ -0,0 +1,23 @@ +ge76: + open_source: + - /tf + - /tf_static + - /clock + - /motion_reference/command_twist + - /debug_info + - /twist_mux/twist + - /depth_camera_front_upper/point_cloud_self_filtered + - /depth_camera_front_lower/point_cloud_self_filtered + - /depth_camera_rear_upper/point_cloud_self_filtered + - /depth_camera_rear_lower/point_cloud_self_filtered + - /depth_camera_left/point_cloud_self_filtered + - /depth_camera_right/point_cloud_self_filtered + - /state_estimator/pose_in_odom + - 
/point_cloud_filter/lidar/point_cloud_filtered + - /wide_angle_camera_front/image_color_rect_resize + - /wide_angle_camera_front_resize/camera_info + - /wide_angle_camera_rear/image_color_rect_resize + - /wide_angle_camera_rear_resize/camera_info + - /wild_visual_navigation_node/robot_state + - /hdr_camera_resize/image_raw + - /hdr_camera_resize/camera_info diff --git a/wild_visual_navigation_anymal/config/recording/dodo.yaml b/wild_visual_navigation_anymal/config/recording/dodo.yaml new file mode 100644 index 00000000..203a100f --- /dev/null +++ b/wild_visual_navigation_anymal/config/recording/dodo.yaml @@ -0,0 +1,157 @@ +lpc: + generic: + - /anymal_low_level_controller/actuator_readings + - /anymal_low_level_controller/actuator_readings_extended_throttled + - /chrony_monitor_lpc/status + - /cpu_loupe_lpc/cpu_loupe + - /cpu_monitor_lpc/status + - /anyjoy/rcu + - /motion_reference/command_pose + - /motion_reference/command_twist + - /debug_info + - /twist_mux/rcu + - /twist_mux/twist + - /disk_monitor_lpc/status + - /distance_tracker/total_distance + - /distance_tracker/display + - /foot_scan + - /memory_monitor_lpc/status + - /path_logger/logged_path + - /path_logger/reverse_path + - /ping_monitor_anymal/markers + - /pdb/battery_state + - /pdb/intrinsic_state + - /pdb/power_state + - /resource_database/reports + - /sensors/battery_voltage + - /soft_emcy_stop + + depth_cameras_front_back: + - /depth_camera_front_upper/point_cloud_self_filtered + - /depth_camera_front_lower/point_cloud_self_filtered + - /depth_camera_rear_upper/point_cloud_self_filtered + - /depth_camera_rear_lower/point_cloud_self_filtered + # Front Upper + # - /depth_camera_front_upper/depth/camera_info + # - /depth_camera_front_upper/depth/image_rect_raw + # - /depth_camera_front_upper/infra1/image_rect_raw + # Front Lower + # - /depth_camera_front_lower/depth/camera_info + # - /depth_camera_front_lower/depth/image_rect_raw + # - /depth_camera_front_lower/infra1/image_rect_raw + # Rear Lower + # - /depth_camera_rear_lower/depth/camera_info + # - /depth_camera_rear_lower/depth/image_rect_raw + # - /depth_camera_rear_lower/infra1/image_rect_raw + # Rear Upper + # - /depth_camera_rear_upper/depth/camera_info + # - /depth_camera_rear_upper/depth/image_rect_raw + # - /depth_camera_rear_upper/infra1/image_rect_raw + + imu: + - /sensors/imu + state_estimator: + - /state_estimator/actuator_readings + - /state_estimator/anymal_state + - /state_estimator/contact_force_lf_foot + - /state_estimator/contact_force_lh_foot + - /state_estimator/contact_force_rf_foot + - /state_estimator/contact_force_rh_foot + - /state_estimator/disable_change_notification + - /state_estimator/enable_change_notification + - /state_estimator/force_calibrator_commands + - /state_estimator/imu + - /state_estimator/imu_angular_velocity_bias + - /state_estimator/imu_linear_acceleration_bias + - /state_estimator/init_change_notification + - /state_estimator/joint_states + - /state_estimator/odometry + - /state_estimator/pose_in_odom + - /state_estimator/reset_notification + - /state_estimator/twist + - /state_estimator/twist_throttle + - /state_estimator/zero_velocity_update_notification + tf: + - /tf + - /tf_static + clock: + - /clock +npc: + lidar: + - /lidar/packets + - /point_cloud_filter/lidar/point_cloud_filtered + # - /point_cloud_filter/lidar/point_cloud_filtered + wide_angle_camera: + - /wide_angle_camera_front/image_color_rect/compressed + - /wide_angle_camera_front/camera_info + - /wide_angle_camera_rear/image_color_rect/compressed + - 
/wide_angle_camera_rear/camera_info + compslam: + - /compslam_lio/full_path + - /compslam_lio/odometry + - /compslam_lio/map + - /compslam_lio/map_optimized + depth_cameras_side: + # Right + # - /depth_camera_right/depth/camera_info + # - /depth_camera_right/depth/image_rect_raw + #- /depth_camera_right/infra1/image_rect_raw + # Left + # - /depth_camera_left/depth/camera_info + # - /depth_camera_left/depth/image_rect_raw + # - /depth_camera_left/infra1/image_rect_raw + - /depth_camera_left/point_cloud_self_filtered + - /depth_camera_right/point_cloud_self_filtered + generic: + - /anymal/absolute_reference + - /behavior_engine/status + - /chrony_monitor_npc/status + - /clicked_point + - /colorless_uniform_mapper/local_point_cloud + - /transform_aft_mapped_to_init_CORRECTED + - /disk_monitor_npc/status + - /integrated_to_init_CORRECTED + - /local_guidance_path_follower/controller_reference_marker + - /local_guidance_path_manager/received_global_path + - /memory_monitor_npc/status + - /msf_compslam_lio_body_imu/msf_core/odometry + - /network_monitor_npc/status + - /pinger_npc_to_jetson/ping + - /pinger_npc_to_lpc/ping + - /cpu_monitor_npc/status + - /cpu_loupe_npc/cpu_loupe + - /resource_database/reports +jetson: + elevation_mapping: + - /elevation_mapping/elevation_map_raw + - /elevation_mapping/semantic_map + generic: + - /chrony_monitor_jetson/status + - /cpu_loupe_jetson/cpu_loupe + - /disk_monitor_jetson/status + - /gpu_monitor_jetson + - /memory_monitor_jetson/status + - /network_monitor_jetson/status + - /safety_checker/traversabiliy_safety_status + - /safety_checker/footprint_polygon + - /safety_checker/untraversable_polygon + - /pinger_jetson_to_lpc/ping + - /pinger_jetson_to_npc/ping + - /pinger_jetson_to_apc/ping + hdr_camera: + - /hdr_camera/image_raw/compressed + - /hdr_camera/camera_info + wvn: + - /wild_visual_navigation_node/front/camera_info + - /wild_visual_navigation_node/front/confidence + - /wild_visual_navigation_node/front/feat + - /wild_visual_navigation_node/front/image_input + - /wild_visual_navigation_node/front/traversability + - /wild_visual_navigation_node/graph_footprints + - /wild_visual_navigation_node/graph_footprints_array + - /wild_visual_navigation_node/rear/camera_info + - /wild_visual_navigation_node/rear/confidence + - /wild_visual_navigation_node/rear/image_input + - /wild_visual_navigation_node/rear/traversability + - /wild_visual_navigation_node/robot_state + - /wild_visual_navigation_node/supervision_graph \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/recording/start_recording.sh b/wild_visual_navigation_anymal/config/recording/start_recording.sh new file mode 100755 index 00000000..383db851 --- /dev/null +++ b/wild_visual_navigation_anymal/config/recording/start_recording.sh @@ -0,0 +1 @@ +rosservice call /rosbag_record_coordinator/start_recording "yaml_file: '/home/jonfrey/git/wild_visual_navigation/wild_visual_navigation_anymal/config/recording/dodo.yaml'" \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/recording/stop_recording.sh b/wild_visual_navigation_anymal/config/recording/stop_recording.sh new file mode 100755 index 00000000..afe1a69e --- /dev/null +++ b/wild_visual_navigation_anymal/config/recording/stop_recording.sh @@ -0,0 +1 @@ +rosservice call /rosbag_record_coordinator/stop_recording "verbose: false" \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/rviz/anymal.rviz b/wild_visual_navigation_anymal/config/rviz/anymal.rviz new file mode 100644 
index 00000000..4ab62272 --- /dev/null +++ b/wild_visual_navigation_anymal/config/rviz/anymal.rviz @@ -0,0 +1,1272 @@ +Panels: + - Class: rviz/Displays + Help Height: 0 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /TF1/Tree1 + - /Robot Info1 + - /Wild Visual Navigation1 + - /Wild Visual Navigation1/Self Supervision1 + - /Wild Visual Navigation1/Prediction1 + - /GridMaps1 + - /GridMaps1/Visual Traversability1 + Splitter Ratio: 0.6433260440826416 + Tree Height: 1310 + - Class: rviz/Selection + Name: Selection + - Class: rviz/Tool Properties + Expanded: + - /2D Pose Estimate1 + - /2D Nav Goal1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.5886790156364441 + - Class: rviz/Views + Expanded: + - /Current View1 + - /ThirdPersonFollower1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz/Time + Name: Time + SyncMode: 0 + SyncSource: AB Wide Rear Front +Preferences: + PromptSaveOnExit: true +Toolbars: + toolButtonStyle: 2 +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz/Grid + Color: 160; 160; 164 + Enabled: false + Line Style: + Line Width: 0.029999999329447746 + Value: Lines + Name: Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 20 + Reference Frame: + Value: false + - Class: rviz/TF + Enabled: false + Frame Timeout: 1000 + Frames: + All Enabled: false + Marker Alpha: 1 + Marker Scale: 0.5 + Name: TF + Show Arrows: true + Show Axes: true + Show Names: true + Tree: + {} + Update Interval: 0 + Value: false + - Class: rviz/Group + Displays: + - Angle Tolerance: 0.10000000149011612 + Class: rviz/Odometry + Covariance: + Orientation: + Alpha: 0.5 + Color: 255; 255; 127 + Color Style: Unique + Frame: Local + Offset: 1 + Scale: 1 + Value: true + Position: + Alpha: 0.30000001192092896 + Color: 204; 51; 204 + Scale: 1 + Value: true + Value: true + Enabled: false + Keep: 200 + Name: Odometry + Position Tolerance: 1 + Queue Size: 10 + Shape: + Alpha: 1 + Axes Length: 1 + Axes Radius: 0.10000000149011612 + Color: 255; 25; 0 + Head Length: 0.30000001192092896 + Head Radius: 0.10000000149011612 + Shaft Length: 1 + Shaft Radius: 0.05000000074505806 + Value: Arrow + Topic: /loam/odometry + Unreliable: false + Value: false + - Alpha: 1 + Class: rviz/RobotModel + Collision Enabled: false + Enabled: true + Links: + All Links Enabled: true + Expand Joint Details: false + Expand Link Details: false + Expand Tree: false + LF_FOOT: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_HAA_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_HFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false + LF_HIP: + Alpha: 1 + Show Axes: false + Show Trail: false + LF_KFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_SHANK: + Alpha: 1 + Show Axes: false + Show Trail: false + LF_THIGH: + Alpha: 1 + Show Axes: false + Show Trail: false + LF_hip_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_shank_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_thigh_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_FOOT: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_HAA_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_HFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_HFE_output: + Alpha: 1 + Show 
Axes: false + Show Trail: false + LH_HIP: + Alpha: 1 + Show Axes: false + Show Trail: false + LH_KFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_SHANK: + Alpha: 1 + Show Axes: false + Show Trail: false + LH_THIGH: + Alpha: 1 + Show Axes: false + Show Trail: false + LH_hip_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_shank_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_thigh_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + Link Tree Style: Links in Alphabetic Order + RF_FOOT: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_HAA_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_HFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false + RF_HIP: + Alpha: 1 + Show Axes: false + Show Trail: false + RF_KFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_SHANK: + Alpha: 1 + Show Axes: false + Show Trail: false + RF_THIGH: + Alpha: 1 + Show Axes: false + Show Trail: false + RF_hip_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_shank_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_thigh_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_FOOT: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_HAA_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_HFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false + RH_HIP: + Alpha: 1 + Show Axes: false + Show Trail: false + RH_KFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_SHANK: + Alpha: 1 + Show Axes: false + Show Trail: false + RH_THIGH: + Alpha: 1 + Show Axes: false + Show Trail: false + RH_hip_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_shank_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_thigh_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + base: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + base_inertia: + Alpha: 1 + Show Axes: false + Show Trail: false + battery: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + body_top: + Alpha: 1 + Show Axes: false + Show Trail: false + bottom_shell: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + depth_camera_front_lower_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_front_lower_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_front_upper_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_front_upper_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_left_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_left_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_rear_lower_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_rear_lower_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_rear_upper_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_rear_upper_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_right_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_right_camera_parent: + Alpha: 1 + 
Show Axes: false + Show Trail: false + docking_socket: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + face_front: + Alpha: 1 + Show Axes: false + Show Trail: false + face_rear: + Alpha: 1 + Show Axes: false + Show Trail: false + face_shell_front: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + face_shell_rear: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + hatch_shell: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + hbc_receiver: + Alpha: 1 + Show Axes: false + Show Trail: false + imu_link: + Alpha: 1 + Show Axes: false + Show Trail: false + lidar: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + lidar_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + top_shell: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + wide_angle_camera_front_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + wide_angle_camera_front_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + wide_angle_camera_rear_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + wide_angle_camera_rear_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + Name: RobotModel + Robot Description: anymal_description + TF Prefix: "" + Update Interval: 0 + Value: true + Visual Enabled: true + Enabled: true + Name: Robot Info + - Class: rviz/Group + Displays: + - Class: rviz/Group + Displays: + - Class: rviz/Marker + Enabled: true + Marker Topic: /wild_visual_navigation_node/graph_footprints + Name: Footprint + Namespaces: + footprints: true + Queue Size: 100 + Value: true + - Alpha: 1 + Buffer Length: 1 + Class: rviz/Path + Color: 67; 110; 176 + Enabled: true + Head Diameter: 0.30000001192092896 + Head Length: 0.20000000298023224 + Length: 0.30000001192092896 + Line Style: Billboards + Line Width: 0.029999999329447746 + Name: Supervision Graph + Offset: + X: 0 + Y: 0 + Z: 0 + Pose Color: 255; 85; 255 + Pose Style: None + Queue Size: 10 + Radius: 0.029999999329447746 + Shaft Diameter: 0.10000000149011612 + Shaft Length: 0.10000000149011612 + Topic: /wild_visual_navigation_node/supervision_graph + Unreliable: false + Value: true + - Alpha: 1 + Buffer Length: 1 + Class: rviz/Path + Color: 25; 255; 0 + Enabled: true + Head Diameter: 0.30000001192092896 + Head Length: 0.20000000298023224 + Length: 0.30000001192092896 + Line Style: Billboards + Line Width: 0.029999999329447746 + Name: Mission Graph + Offset: + X: 0 + Y: 0 + Z: 0 + Pose Color: 255; 85; 255 + Pose Style: Axes + Queue Size: 10 + Radius: 0.029999999329447746 + Shaft Diameter: 0.10000000149011612 + Shaft Length: 0.10000000149011612 + Topic: /wild_visual_navigation_node/mission_graph + Unreliable: false + Value: true + Enabled: true + Name: Self Supervision + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_image_traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Traversability + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_image_confidence + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Uncertainty + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Alpha: 1 + Buffer Length: 1 + Class: rviz/Path + Color: 136; 138; 133 + Enabled: true + Head Diameter: 0.30000001192092896 + Head Length: 0.20000000298023224 + Length: 
0.10000000149011612 + Line Style: Billboards + Line Width: 0.05000000074505806 + Name: Mission Graph + Offset: + X: 0 + Y: 0 + Z: 0 + Pose Color: 255; 85; 255 + Pose Style: Axes + Queue Size: 10 + Radius: 0.009999999776482582 + Shaft Diameter: 0.10000000149011612 + Shaft Length: 0.10000000149011612 + Topic: /wild_visual_navigation_node/mission_graph + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_mask + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Reprojected Path + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_labeled + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Supervision Signal + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: "" + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Placeholder + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + Enabled: false + Name: Self Supervision (Debug) + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: true + Image Topic: /wide_angle_camera_front/image_color_rect + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: AB Wide Angle Front + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wide_angle_camera_rear/image_color_rect + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: AB Wide Rear Front + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + Enabled: true + Name: Cameras + - Class: rviz/Group + Displays: + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-front + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_front_upper/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-rear + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_rear_upper/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 
+ Min Color: 0; 0; 0 + Name: realsense-left + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_left/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-right + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_right/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 7.054330825805664 + Min Value: -0.766709566116333 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: AxisColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: velodyne + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.019999999552965164 + Style: Squares + Topic: /point_cloud_filter/lidar/point_cloud_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + Enabled: false + Name: Depth Sensors + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/front/image_input + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Input Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/front/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Traversability Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_overlay + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Learning Mask + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_visu_front_trav/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Traversability Overlay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_visu_rear_trav/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Traversability Overlay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + Enabled: true + Name: Prediction + - Class: rviz/Group + Displays: + - Class: rviz/Marker + Enabled: true + Marker Topic: /field_local_planner/real_carrot + Name: Goal (carrot) + Namespaces: + {} + Queue Size: 100 + Value: true + - Alpha: 1 + Axes Length: 1 + Axes Radius: 0.10000000149011612 + Class: rviz/PoseWithCovariance + Color: 255; 25; 0 + Covariance: + Orientation: + Alpha: 0.5 + Color: 255; 255; 127 + Color Style: Unique + Frame: Local 
+ Offset: 1 + Scale: 1 + Value: true + Position: + Alpha: 0.30000001192092896 + Color: 204; 51; 204 + Scale: 1 + Value: true + Value: true + Enabled: true + Head Length: 0.30000001192092896 + Head Radius: 0.10000000149011612 + Name: Goal (pose) + Queue Size: 10 + Shaft Length: 1 + Shaft Radius: 0.05000000074505806 + Shape: Arrow + Topic: /field_local_planner/current_goal + Unreliable: false + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: geodesic + Color Transformer: IntensityLayer + ColorMap: plasma + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_inpainted + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 5 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Geodesic Field (GDF) + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + Enabled: true + Name: Local planner + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/left/image_input + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: L Input Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/left/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: L Traversability Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/left/confidence + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: L Confidence Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/right/image_input + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Input Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/right/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Traversability Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/right/confidence + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Confidence Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + Enabled: false + Name: LR Prediction + Enabled: true + Name: Wild Visual Navigation + - Class: rviz/Group + Displays: + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: elevation + Color Transformer: GridMapLayer + ColorMap: terrain + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: false + Max Color: 255; 255; 255 + Max Intensity: 10 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Elevation + Show Grid Lines: true + Topic: /elevation_mapping/semantic_map + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 
200; 200; 200 + Color Layer: geodesic + Color Transformer: IntensityLayer + ColorMap: plasma + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_inpainted + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 5 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Local Planner (GDF) + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: sdf + Color Transformer: IntensityLayer + ColorMap: turbo + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: Layer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 2 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Local Planner (SDF) + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: rgb + Color Transformer: ColorLayer + ColorMap: terrain + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_with_semantics + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: RGB + Show Grid Lines: true + Topic: /elevation_mapping/semantic_map + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: visual_traversability + Color Transformer: "" + ColorMap: coolwarm + Enabled: true + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_with_semantics + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Visual Traversability + Show Grid Lines: false + Topic: /elevation_mapping/semantic_map + Unreliable: false + Use ColorMap: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: traversability + Color Transformer: "" + ColorMap: coolwarm + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Geom Traversability + Show Grid Lines: true + Topic: /elevation_mapping/semantic_map + Unreliable: false + Use ColorMap: true + Value: false + Enabled: true + Name: GridMaps + Enabled: true + Global Options: + Background Color: 255; 255; 255 + Default Light: true + Fixed Frame: base + Frame Rate: 30 + Name: root + Tools: + - Class: rviz/Interact + Hide Inactive Objects: true + - Class: rviz/MoveCamera + - Class: rviz/Select + - Class: rviz/FocusCamera + - Class: rviz/Measure + - Class: rviz/SetInitialPose + Theta std deviation: 0.2617993950843811 + Topic: /initialpose + X std deviation: 0.5 + Y std deviation: 0.5 + - Class: rviz/SetGoal + Topic: /move_base_simple/goal + - Class: rviz/PublishPoint + Single 
click: true + Topic: /clicked_point + Value: true + Views: + Current: + Class: rviz/XYOrbit + Distance: 12.523152351379395 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Field of View: 0.7853981852531433 + Focal Point: + X: -2.2413411140441895 + Y: -0.2522427439689636 + Z: 0 + Focal Shape Fixed Size: true + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: false + Name: Current View + Near Clip Distance: 0.009999999776482582 + Pitch: 1.5697963237762451 + Target Frame: base + Yaw: 3.1414999961853027 + Saved: + - Class: rviz/ThirdPersonFollower + Distance: 22.303850173950195 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Field of View: 0.7853981852531433 + Focal Point: + X: 3.3520777225494385 + Y: 3.0211095809936523 + Z: -5.593323707580566 + Focal Shape Fixed Size: false + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: false + Name: ThirdPersonFollower + Near Clip Distance: 0.009999999776482582 + Pitch: 1.5697963237762451 + Target Frame: base + Yaw: 0.0003485679626464844 +Window Geometry: + AB Wide Angle Front: + collapsed: false + AB Wide Rear Front: + collapsed: false + Displays: + collapsed: false + F Traversability Overlay: + collapsed: false + Height: 2096 + Hide Left Dock: false + Hide Right Dock: false + Input Image: + collapsed: false + L Confidence Raw: + collapsed: false + L Input Image: + collapsed: false + L Traversability Raw: + collapsed: false + Learning Mask: + collapsed: false + Placeholder: + collapsed: false + QMainWindow State: 000000ff00000000fd0000000400000000000001cb0000055bfc0200000011fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000005fb000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c00610079007301000002b80000055b000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d00650072006100000002ea000000c80000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640000000259000000520000000000000000fb00000024005300750070006500720076006900730069006f006e0020002d0020004d00610073006b000000027e000000b80000000000000000fb0000000a0049006d00610067006500000002eb000001280000000000000000fb000000140043006f006e0066006900640065006e0063006500000003b3000000c60000000000000000fb00000028004600200043006f006e0066006900640065006e006300650020004f007600650072006c0061007900000002be000000160000000000000000fb0000003e004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790020005200650070006c0061007900000004a40000009f0000000000000000fb00000028004600200054007200610076006500720073006100620069006c00690074007900200052006100770000000482000000c10000000000000000fb00000028004600200054007200610076006500720073006100620069006c00690074007900200052006100770000000482000000c10000000000000000000000010000010f0000055bfc0200000003fb0000001e0054006f006f006c002000500072006f0070006500
7200740069006500730100000041000000780000000000000000fb0000000a0056006900650077007301000002b80000055b000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b200000000000000000000000200000eb800000275fc010000000ffb0000001c0049006d00610067006500200046006500610074007500720065007300000000bf000003690000000000000000fb00000024004100420020005700690064006500200052006500610072002000460072006f006e00740100000000000003d5000000ba00fffffffc0000012a0000010c0000000000fffffffa000000000200000003fb0000000a0049006d0061006700650000000000ffffffff0000000000000000fb0000002400460072006f006e0074002000280063006f006d007000720065007300730065006400290000000000ffffffff0000000000000000fb0000001e00410053002000460072006f006e00740020002800630061006d00340029000000003d0000010c0000000000000000fb00000030005200200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c0061007901000003db00000330000000d500fffffffb00000026004100420020005700690064006500200041006e0067006c0065002000460072006f006e00740100000711000003ca000000c200fffffffc0000049b000001670000000000fffffffaffffffff0200000003fb000000160049006e00700075007400200049006d006100670065000000003d000001ac0000001600fffffffb0000001a005200200049006e00700075007400200049006d0061006700650000000000ffffffff0000001600fffffffb0000001a004c00200049006e00700075007400200049006d0061006700650000000000ffffffff0000001600fffffffb0000000a0049006d00610067006501000003240000018a0000000000000000fb0000002c0054007200610076006500720073006100620069006c0069007400790020004f007600650072006c0061007900000004ae0000010c0000000000000000fc00000ae1000003d7000000d400fffffffa000000000200000007fb00000030004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790100000000ffffffff0000001600fffffffb000000160046002000540072006100760020004f0076006500720000000000ffffffff0000000000000000fb0000003e004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790020005200650070006c006100790100000000ffffffff0000000000000000fb000000240054007200610076006500720073006100620069006c0069007400790020005200610077000000003d0000012b0000001600fffffffb00000028005200200054007200610076006500720073006100620069006c00690074007900200052006100770000000000ffffffff0000001600fffffffb00000028004c00200054007200610076006500720073006100620069006c00690074007900200052006100770000000000ffffffff0000001600fffffffb00000030004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790000000000ffffffff0000000000000000fc000006cf000000b10000000000fffffffa000000000200000005fb00000036004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790020005200650070006c006100790100000000ffffffff0000000000000000fb0000001a004c006500610072006e0069006e00670020004d00610073006b000000003d0000012b0000001600fffffffb00000020005200200043006f006e0066006900640065006e0063006500200052006100770000000000ffffffff0000001600fffffffb00000020004c00200043006f006e0066006900640065006e0063006500200052006100770000000000ffffffff0000001600fffffffb00000028004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790000000000ffffffff0000000000000000fb000000160050006c0061006300650068006f006c00640065007200000006630000011d0000008500fffffffb000000160049006e00700075007400200049006d006100670065010000031e000001790000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640100000659000001af0000000000000000fb00000024005300750070006500720076006900730069006f006e0
020002d0020004d00610073006b010000080e000001aa0000000000000000fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000078000000061fc010000000dfb000000200052006500700072006f006a00650063007400650064002000500061007400680000000000000000ab000000ab00fffffffb00000024005300750070006500720076006900730069006f006e0020005300690067006e0061006c00000000b100000592000000b000fffffffb0000001c0054007200610076006500720073006100620069006c0069007400790000000649000000ac0000009100fffffffb000000160055006e006300650072007400610069006e0074007900000006fb000000850000008500fffffffb000000160050006c0061006300650068006f006c006400650072000000000000000eb80000000000000000fb0000001c0054007200610076006500720073006100620069006c006900740079010000031f0000017d0000000000000000fb000000160055006e006300650072007400610069006e0074007901000004a2000002960000000000000000fb000000140043006f006e0066006900640065006e006300650000000000000003d40000000000000000fb0000001c0054007200610076006500720073006100620069006c0069007400790000000231000001f20000000000000000fb0000001c004100530020004c0065006600740020002800630061006d003300290000000000000007380000000000000000fb0000001e004100530020005200690067006800740020002800630061006d0035002900000003f0000003480000000000000000fb0000000800540069006d00650000000000000007800000041800fffffffb0000000800540069006d0065010000000000000450000000000000000000000bd20000055b00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + R Confidence Raw: + collapsed: false + R Input Image: + collapsed: false + R Traversability Overlay: + collapsed: false + R Traversability Raw: + collapsed: false + Reprojected Path: + collapsed: false + Selection: + collapsed: false + Supervision Signal: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Traversability: + collapsed: false + Traversability Raw: + collapsed: false + Uncertainty: + collapsed: false + Views: + collapsed: false + Width: 3768 + X: 1992 + Y: 27 diff --git a/wild_visual_navigation_anymal/config/rviz/deprecated/anymal.rviz b/wild_visual_navigation_anymal/config/rviz/deprecated/anymal.rviz new file mode 100644 index 00000000..5dabe460 --- /dev/null +++ b/wild_visual_navigation_anymal/config/rviz/deprecated/anymal.rviz @@ -0,0 +1,1540 @@ +Panels: + - Class: rviz/Displays + Help Height: 0 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /TF1/Tree1 + - /Wild Visual Navigation1 + - /Wild Visual Navigation1/Cameras1 + - /Wild Visual Navigation1/Prediction1 + - /Wild Visual Navigation1/Prediction1/F Traversability Raw1 + - /Wild Visual Navigation1/Prediction1/F Learning Mask1 + - /Wild Visual Navigation1/Prediction1/F Confidence Overlay1 + - /Wild Visual Navigation1/LR Prediction1 + - /Elevation Map WIFI1/Local Planner (SDF)1 + - /Elevation Map RAW1 + Splitter Ratio: 0.4659898579120636 + Tree Height: 698 + - Class: rviz/Selection + Name: Selection + - Class: rviz/Tool Properties + Expanded: + - /2D Pose Estimate1 + - /2D Nav Goal1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.5886790156364441 + - Class: rviz/Views + Expanded: + - /Current View1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz/Time + Name: Time + SyncMode: 0 + SyncSource: velodyne +Preferences: + PromptSaveOnExit: true +Toolbars: + toolButtonStyle: 2 +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz/Grid + Color: 160; 160; 164 + Enabled: true + Line Style: + Line Width: 0.029999999329447746 + Value: Lines + Name: 
Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 20 + Reference Frame: + Value: true + - Class: rviz/TF + Enabled: false + Frame Timeout: 1000 + Frames: + All Enabled: false + Marker Alpha: 1 + Marker Scale: 0.5 + Name: TF + Show Arrows: true + Show Axes: true + Show Names: true + Tree: + {} + Update Interval: 0 + Value: false + - Class: rviz/Group + Displays: + - Angle Tolerance: 0.10000000149011612 + Class: rviz/Odometry + Covariance: + Orientation: + Alpha: 0.5 + Color: 255; 255; 127 + Color Style: Unique + Frame: Local + Offset: 1 + Scale: 1 + Value: true + Position: + Alpha: 0.30000001192092896 + Color: 204; 51; 204 + Scale: 1 + Value: true + Value: true + Enabled: false + Keep: 200 + Name: Odometry + Position Tolerance: 1 + Queue Size: 10 + Shape: + Alpha: 1 + Axes Length: 1 + Axes Radius: 0.10000000149011612 + Color: 255; 25; 0 + Head Length: 0.30000001192092896 + Head Radius: 0.10000000149011612 + Shaft Length: 1 + Shaft Radius: 0.05000000074505806 + Value: Arrow + Topic: /loam/odometry + Unreliable: false + Value: false + - Alpha: 1 + Class: rviz/RobotModel + Collision Enabled: false + Enabled: true + Links: + All Links Enabled: true + Expand Joint Details: false + Expand Link Details: false + Expand Tree: false + LF_FOOT: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_HAA_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_HFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false + LF_HIP: + Alpha: 1 + Show Axes: false + Show Trail: false + LF_KFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_SHANK: + Alpha: 1 + Show Axes: false + Show Trail: false + LF_THIGH: + Alpha: 1 + Show Axes: false + Show Trail: false + LF_hip_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_shank_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LF_thigh_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_FOOT: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_HAA_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_HFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false + LH_HIP: + Alpha: 1 + Show Axes: false + Show Trail: false + LH_KFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_SHANK: + Alpha: 1 + Show Axes: false + Show Trail: false + LH_THIGH: + Alpha: 1 + Show Axes: false + Show Trail: false + LH_hip_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_shank_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + LH_thigh_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + Link Tree Style: Links in Alphabetic Order + RF_FOOT: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_HAA_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_HFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false + RF_HIP: + Alpha: 1 + Show Axes: false + Show Trail: false + RF_KFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_SHANK: + Alpha: 1 + Show Axes: false + Show Trail: false + RF_THIGH: + Alpha: 1 + Show Axes: false + Show Trail: false + RF_hip_fixed: + Alpha: 1 + Show Axes: 
false + Show Trail: false + Value: true + RF_shank_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RF_thigh_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_FOOT: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_HAA_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_HFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false + RH_HIP: + Alpha: 1 + Show Axes: false + Show Trail: false + RH_KFE_drive: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_SHANK: + Alpha: 1 + Show Axes: false + Show Trail: false + RH_THIGH: + Alpha: 1 + Show Axes: false + Show Trail: false + RH_hip_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_shank_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + RH_thigh_fixed: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + base: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + base_inertia: + Alpha: 1 + Show Axes: false + Show Trail: false + battery: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + body_top: + Alpha: 1 + Show Axes: false + Show Trail: false + bottom_shell: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + depth_camera_front_lower_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_front_lower_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_front_upper_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_front_upper_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_left_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_left_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_rear_lower_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_rear_lower_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_rear_upper_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_rear_upper_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_right_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + depth_camera_right_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + docking_socket: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + face_front: + Alpha: 1 + Show Axes: false + Show Trail: false + face_rear: + Alpha: 1 + Show Axes: false + Show Trail: false + face_shell_front: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + face_shell_rear: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + hatch_shell: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + hbc_receiver: + Alpha: 1 + Show Axes: false + Show Trail: false + imu_link: + Alpha: 1 + Show Axes: false + Show Trail: false + lidar: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + lidar_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + top_shell: + Alpha: 1 + Show Axes: false + Show Trail: false + Value: true + wide_angle_camera_front_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + wide_angle_camera_front_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + wide_angle_camera_rear_camera: + Alpha: 1 + Show Axes: false + Show Trail: false + wide_angle_camera_rear_camera_parent: + Alpha: 1 + Show Axes: false + Show Trail: false + 
Name: RobotModel + Robot Description: anymal_description + TF Prefix: "" + Update Interval: 0 + Value: true + Visual Enabled: true + Enabled: true + Name: Robot Info + - Class: rviz/Group + Displays: + - Class: rviz/Group + Displays: + - Class: rviz/Marker + Enabled: true + Marker Topic: /wild_visual_navigation_node/graph_footprints + Name: Footprint + Namespaces: + footprints: true + Queue Size: 100 + Value: true + - Alpha: 1 + Buffer Length: 1 + Class: rviz/Path + Color: 40; 87; 244 + Enabled: true + Head Diameter: 0.30000001192092896 + Head Length: 0.20000000298023224 + Length: 0.30000001192092896 + Line Style: Billboards + Line Width: 0.029999999329447746 + Name: Supervision Graph + Offset: + X: 0 + Y: 0 + Z: 0 + Pose Color: 255; 85; 255 + Pose Style: None + Queue Size: 10 + Radius: 0.029999999329447746 + Shaft Diameter: 0.10000000149011612 + Shaft Length: 0.10000000149011612 + Topic: /wild_visual_navigation_node/supervision_graph + Unreliable: false + Value: true + Enabled: true + Name: Self Supervision + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_image_traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Traversability + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_image_confidence + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Uncertainty + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Alpha: 1 + Buffer Length: 1 + Class: rviz/Path + Color: 136; 138; 133 + Enabled: true + Head Diameter: 0.30000001192092896 + Head Length: 0.20000000298023224 + Length: 0.10000000149011612 + Line Style: Billboards + Line Width: 0.05000000074505806 + Name: Mission Graph + Offset: + X: 0 + Y: 0 + Z: 0 + Pose Color: 255; 85; 255 + Pose Style: Axes + Queue Size: 10 + Radius: 0.009999999776482582 + Shaft Diameter: 0.10000000149011612 + Shaft Length: 0.10000000149011612 + Topic: /wild_visual_navigation_node/mission_graph + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_mask + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Reprojected Path + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_labeled + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Supervision Signal + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: "" + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Placeholder + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + Enabled: false + Name: Self Supervision (Debug) + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: false + Image Topic: /alphasense_driver_ros/cam4/debayered + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: AS Front (cam4) + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /alphasense_driver_ros/cam3/debayered + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: AS Left (cam3) + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + 
Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /alphasense_driver_ros/cam5/debayered + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: AS Right (cam5) + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: true + Image Topic: /wide_angle_camera_front/image_color + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: AB Wide Angle Front + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: false + Image Topic: "" + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Placeholder + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + Enabled: true + Name: Cameras + - Class: rviz/Group + Displays: + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: false + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: rear-bpearl + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /robot_self_filter/bpearl_rear/point_cloud + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 22.831743240356445 + Min Value: -4.732121467590332 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: false + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: front-bpearl + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /robot_self_filter/bpearl_front/point_cloud + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: false + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-front + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_front/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: false + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-rear + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_rear/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + 
Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: false + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-left + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_left/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: false + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-right + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_right/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 15.389507293701172 + Min Value: -1.5122853517532349 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: AxisColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: velodyne + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.05000000074505806 + Style: Squares + Topic: /point_cloud_filter/lidar/point_cloud_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + Enabled: true + Name: Depth Sensors + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/image_input + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Input Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/front/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Traversability Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_overlay + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Learning Mask + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_visu_traversability/traversability_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Traversability Overlay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_visu_0/traversability_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Traversability Overlay Replay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_visu_confidence/confidence_overlayed + 
Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Confidence Overlay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_visu_1/confidence_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Confidence Overlay Replay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + Enabled: true + Name: Prediction + - Class: rviz/Group + Displays: + - Class: rviz/Marker + Enabled: true + Marker Topic: /field_local_planner/real_carrot + Name: Goal (carrot) + Namespaces: + {} + Queue Size: 100 + Value: true + - Alpha: 1 + Axes Length: 1 + Axes Radius: 0.10000000149011612 + Class: rviz/PoseWithCovariance + Color: 255; 25; 0 + Covariance: + Orientation: + Alpha: 0.5 + Color: 255; 255; 127 + Color Style: Unique + Frame: Local + Offset: 1 + Scale: 1 + Value: true + Position: + Alpha: 0.30000001192092896 + Color: 204; 51; 204 + Scale: 1 + Value: true + Value: true + Enabled: true + Head Length: 0.30000001192092896 + Head Radius: 0.10000000149011612 + Name: Goal (pose) + Queue Size: 10 + Shaft Length: 1 + Shaft Radius: 0.05000000074505806 + Shape: Arrow + Topic: /field_local_planner/current_goal + Unreliable: false + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: geodesic + Color Transformer: IntensityLayer + ColorMap: plasma + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_inpainted + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 5 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Geodesic Field (GDF) + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + Enabled: true + Name: Local planner + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/left/image_input + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: L Input Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/left/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: L Traversability Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/left/confidence + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: L Confidence Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/right/image_input + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Input Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/right/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Traversability Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/right/confidence + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Confidence Raw + 
Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + Enabled: false + Name: LR Prediction + Enabled: true + Name: Wild Visual Navigation + - Class: rviz/Group + Displays: + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: elevation + Color Transformer: IntensityLayer + ColorMap: terrain + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Elevation + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: elevation_inpainted + Color Transformer: IntensityLayer + ColorMap: terrain + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_inpainted + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Elevation (Inpainted) + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: geodesic + Color Transformer: IntensityLayer + ColorMap: plasma + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_inpainted + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 5 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Local Planner (GDF) + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 0.5 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: sdf + Color Transformer: GridMapLayer + ColorMap: turbo + Enabled: true + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 2 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Local Planner (SDF) + Show Grid Lines: false + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: rgb_image + Color Transformer: ColorLayer + ColorMap: terrain + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: RGB + Show Grid Lines: true + Topic: /elevation_mapping/semantic_map_raw + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: visual_traversability_inpainted + Color Transformer: GridMapLayer + ColorMap: coolwarm + Enabled: false + Grid Cell Decimation: 1 + Grid Line 
Thickness: 0.10000000149011612 + Height Layer: elevation_inpainted + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 10 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Visual Traversability Inpainted + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + Enabled: false + Name: Elevation Map WIFI + - Class: rviz/Group + Displays: + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: elevation + Color Transformer: GridMapLayer + ColorMap: terrain + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: false + Max Color: 255; 255; 255 + Max Intensity: 10 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Elevation + Show Grid Lines: true + Topic: /elevation_mapping/semantic_map_raw + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: geodesic + Color Transformer: IntensityLayer + ColorMap: plasma + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_inpainted + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 5 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Local Planner (GDF) + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: elevation_inpainted + Color Transformer: IntensityLayer + ColorMap: terrain + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_inpainted + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Elevation (Inpainted) + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: sdf + Color Transformer: IntensityLayer + ColorMap: turbo + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: Layer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 2 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Local Planner (SDF) + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_wifi + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: rgb_image + Color Transformer: ColorLayer + ColorMap: terrain + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: "" + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: RGB + Show Grid Lines: true + Topic: /elevation_mapping/semantic_map_raw + Unreliable: false + Use ColorMap: true + Value: false + - 
Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: visual_traversability + Color Transformer: "" + ColorMap: coolwarm + Enabled: true + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_with_semantic + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 10 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Visual Traversability + Show Grid Lines: true + Topic: /elevation_mapping/semantic_map_raw + Unreliable: false + Use ColorMap: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: traversability + Color Transformer: "" + ColorMap: coolwarm + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 10 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Geom Traversability + Show Grid Lines: true + Topic: /elevation_mapping/elevation_map_raw + Unreliable: false + Use ColorMap: true + Value: false + Enabled: true + Name: Elevation Map RAW + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_visu_0/traversability_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Trav Over + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + Enabled: true + Global Options: + Background Color: 255; 255; 255 + Default Light: true + Fixed Frame: base + Frame Rate: 30 + Name: root + Tools: + - Class: rviz/Interact + Hide Inactive Objects: true + - Class: rviz/MoveCamera + - Class: rviz/Select + - Class: rviz/FocusCamera + - Class: rviz/Measure + - Class: rviz/SetInitialPose + Theta std deviation: 0.2617993950843811 + Topic: /initialpose + X std deviation: 0.5 + Y std deviation: 0.5 + - Class: rviz/SetGoal + Topic: /move_base_simple/goal + - Class: rviz/PublishPoint + Single click: true + Topic: /clicked_point + Value: true + Views: + Current: + Class: rviz/ThirdPersonFollower + Distance: 18.592376708984375 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Field of View: 0.7853981852531433 + Focal Point: + X: 0.8738794326782227 + Y: 1.1860613822937012 + Z: -5.593326091766357 + Focal Shape Fixed Size: false + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: false + Name: Current View + Near Clip Distance: 0.009999999776482582 + Pitch: 0.6897974610328674 + Target Frame: base + Yaw: 5.176687240600586 + Saved: ~ +Window Geometry: + AB Wide Angle Front: + collapsed: false + AS Front (cam4): + collapsed: false + AS Left (cam3): + collapsed: false + AS Right (cam5): + collapsed: false + Displays: + collapsed: false + F Confidence Overlay: + collapsed: false + F Confidence Overlay Replay: + collapsed: false + F Input Image: + collapsed: false + F Learning Mask: + collapsed: false + F Trav Over: + collapsed: false + F Traversability Overlay: + collapsed: false + F Traversability Overlay Replay: + collapsed: false + F Traversability Raw: + collapsed: false + Height: 1376 + Hide Left Dock: false + Hide Right Dock: true + L Confidence Raw: + collapsed: false + L Input Image: + collapsed: false + L Traversability Raw: + collapsed: false + Placeholder: + collapsed: false + QMainWindow State: 
000000ff00000000fd0000000400000000000001dd000002f7fc0200000011fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000005fb000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000024c000002f7000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d00650072006100000002ea000000c80000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640000000259000000520000000000000000fb00000024005300750070006500720076006900730069006f006e0020002d0020004d00610073006b000000027e000000b80000000000000000fb0000000a0049006d00610067006500000002eb000001280000000000000000fb000000140043006f006e0066006900640065006e0063006500000003b3000000c60000000000000000fb00000028004600200043006f006e0066006900640065006e006300650020004f007600650072006c0061007900000002be000000160000001600fffffffb00000030004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c0061007900000002ea0000001a0000001600fffffffb00000036004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790020005200650070006c0061007900000002fc0000001e0000001600fffffffb0000003e004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790020005200650070006c00610079000000033c0000009f0000001600ffffff000000010000010f0000025ffc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000017c0000025f000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b2000000000000000000000002000009b800000209fc010000000bfb0000001c0049006d00610067006500200046006500610074007500720065007300000000bf000003690000000000000000fc000000000000029b000000c200fffffffa000000010200000004fb0000000a0049006d0061006700650000000000ffffffff0000000000000000fb00000026004100420020005700690064006500200041006e0067006c0065002000460072006f006e00740100000000ffffffff0000001600fffffffb0000002400460072006f006e0074002000280063006f006d007000720065007300730065006400290000000000ffffffff0000000000000000fb0000001e00410053002000460072006f006e00740020002800630061006d00340029000000003d0000010c0000001600fffffffc000002a1000002820000009100fffffffa000000000200000003fb0000001a004600200049006e00700075007400200049006d006100670065010000003d000001ac0000001600fffffffb0000001a005200200049006e00700075007400200049006d0061006700650000000000ffffffff0000001600fffffffb0000001a004c00200049006e00700075007400200049006d0061006700650000000000ffffffff0000001600fffffffb0000000a0049006d00610067006501000003240000018a0000000000000000fc00000529000002390000008100fffffffa000000000200000006fb000000160046002000540072006100760020004f0076006500720100000000ffffffff0000001600fffffffb0000003e004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790020005200650070006c006100790100000000ffffffff0000000000000000fb00000028004600200054007200610076006500720073006100620069006c0069007400790020005200610077000000003d0000012b0000001600fffff
ffb00000028005200200054007200610076006500720073006100620069006c00690074007900200052006100770000000000ffffffff0000001600fffffffb00000028004c00200054007200610076006500720073006100620069006c00690074007900200052006100770000000000ffffffff0000001600fffffffb00000030004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790000000000ffffffff0000000000000000fc0000076800000250000000a200fffffffa000000010200000005fb00000036004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790020005200650070006c006100790100000000ffffffff0000000000000000fb0000001e00460020004c006500610072006e0069006e00670020004d00610073006b010000003d0000012b0000001600fffffffb00000020005200200043006f006e0066006900640065006e0063006500200052006100770000000000ffffffff0000001600fffffffb00000020004c00200043006f006e0066006900640065006e0063006500200052006100770000000000ffffffff0000001600fffffffb00000028004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790000000000ffffffff0000000000000000fb000000160050006c0061006300650068006f006c006400650072000000054b000001ed0000008500fffffffb000000160049006e00700075007400200049006d006100670065010000031e000001790000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640100000659000001af0000000000000000fb00000024005300750070006500720076006900730069006f006e0020002d0020004d00610073006b010000080e000001aa0000000000000000fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073800000061fc010000000dfb000000200052006500700072006f006a00650063007400650064002000500061007400680000000000000000ab000000ab00fffffffb00000024005300750070006500720076006900730069006f006e0020005300690067006e0061006c00000000b1000004d2000000b000fffffffb0000001c0054007200610076006500720073006100620069006c0069007400790000000589000000f60000009100fffffffb000000160055006e006300650072007400610069006e007400790000000685000000b30000008500fffffffb000000160050006c0061006300650068006f006c0064006500720000000000000007380000008500fffffffb0000001c0054007200610076006500720073006100620069006c006900740079010000031f0000017d0000000000000000fb000000160055006e006300650072007400610069006e0074007901000004a2000002960000000000000000fb000000140043006f006e0066006900640065006e006300650000000000000003d40000000000000000fb0000001c0054007200610076006500720073006100620069006c0069007400790000000231000001f20000000000000000fb0000001c004100530020004c0065006600740020002800630061006d003300290000000000000007380000009700fffffffb0000001e004100530020005200690067006800740020002800630061006d0035002900000003f000000348000000a000fffffffb0000000800540069006d00650000000000000007800000041800fffffffb0000000800540069006d00650100000000000004500000000000000000000007d5000002f700000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + R Confidence Raw: + collapsed: false + R Input Image: + collapsed: false + R Traversability Raw: + collapsed: false + Reprojected Path: + collapsed: false + Selection: + collapsed: false + Supervision Signal: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Traversability: + collapsed: false + Uncertainty: + collapsed: false + Views: + collapsed: true + Width: 2488 + X: 72 + Y: 27 diff --git a/wild_visual_navigation_ros/config/rviz/anymal_debug.rviz b/wild_visual_navigation_anymal/config/rviz/deprecated/anymal_debug.rviz similarity index 99% rename from 
wild_visual_navigation_ros/config/rviz/anymal_debug.rviz rename to wild_visual_navigation_anymal/config/rviz/deprecated/anymal_debug.rviz index 9c9f3c06..adb8ecd4 100644 --- a/wild_visual_navigation_ros/config/rviz/anymal_debug.rviz +++ b/wild_visual_navigation_anymal/config/rviz/deprecated/anymal_debug.rviz @@ -524,7 +524,7 @@ Visualization Manager: Length: 0.30000001192092896 Line Style: Billboards Line Width: 0.029999999329447746 - Name: Proprio Graph + Name: Supervision Graph Offset: X: 0 Y: 0 @@ -535,7 +535,7 @@ Visualization Manager: Radius: 0.029999999329447746 Shaft Diameter: 0.10000000149011612 Shaft Length: 0.10000000149011612 - Topic: /wild_visual_navigation_node/proprioceptive_graph + Topic: /wild_visual_navigation_node/supervision_graph Unreliable: false Value: true Enabled: true diff --git a/wild_visual_navigation_ros/config/rviz/anymal.rviz b/wild_visual_navigation_anymal/config/rviz/deprecated/anymal_dodo.rviz similarity index 80% rename from wild_visual_navigation_ros/config/rviz/anymal.rviz rename to wild_visual_navigation_anymal/config/rviz/deprecated/anymal_dodo.rviz index b8fef33f..3f4ec12c 100644 --- a/wild_visual_navigation_ros/config/rviz/anymal.rviz +++ b/wild_visual_navigation_anymal/config/rviz/deprecated/anymal_dodo.rviz @@ -7,12 +7,12 @@ Panels: - /Global Options1 - /TF1/Tree1 - /Wild Visual Navigation1 - - /Wild Visual Navigation1/Depth Sensors1 - - /Wild Visual Navigation1/Prediction1 - - /Elevation Map WIFI1 + - /Wild Visual Navigation1/Depth Sensors1/realsense-front1 - /Elevation Map WIFI1/Local Planner (SDF)1 - Splitter Ratio: 0.6102783679962158 - Tree Height: 546 + - /Elevation Map RAW1 + - /Elevation Map RAW1/Visual Traversability1 + Splitter Ratio: 0.5789473652839661 + Tree Height: 685 - Class: rviz/Selection Name: Selection - Class: rviz/Tool Properties @@ -28,10 +28,9 @@ Panels: Name: Views Splitter Ratio: 0.5 - Class: rviz/Time - Experimental: false Name: Time SyncMode: 0 - SyncSource: Input Image + SyncSource: Traversability Raw Preferences: PromptSaveOnExit: true Toolbars: @@ -123,21 +122,25 @@ Visualization Manager: Show Axes: false Show Trail: false Value: true - LF_HAA: + LF_HAA_drive: Alpha: 1 Show Axes: false Show Trail: false Value: true - LF_HFE: + LF_HFE_drive: Alpha: 1 Show Axes: false Show Trail: false Value: true + LF_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false LF_HIP: Alpha: 1 Show Axes: false Show Trail: false - LF_KFE: + LF_KFE_drive: Alpha: 1 Show Axes: false Show Trail: false @@ -170,21 +173,25 @@ Visualization Manager: Show Axes: false Show Trail: false Value: true - LH_HAA: + LH_HAA_drive: Alpha: 1 Show Axes: false Show Trail: false Value: true - LH_HFE: + LH_HFE_drive: Alpha: 1 Show Axes: false Show Trail: false Value: true + LH_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false LH_HIP: Alpha: 1 Show Axes: false Show Trail: false - LH_KFE: + LH_KFE_drive: Alpha: 1 Show Axes: false Show Trail: false @@ -218,21 +225,25 @@ Visualization Manager: Show Axes: false Show Trail: false Value: true - RF_HAA: + RF_HAA_drive: Alpha: 1 Show Axes: false Show Trail: false Value: true - RF_HFE: + RF_HFE_drive: Alpha: 1 Show Axes: false Show Trail: false Value: true + RF_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false RF_HIP: Alpha: 1 Show Axes: false Show Trail: false - RF_KFE: + RF_KFE_drive: Alpha: 1 Show Axes: false Show Trail: false @@ -265,21 +276,25 @@ Visualization Manager: Show Axes: false Show Trail: false Value: true - RH_HAA: + RH_HAA_drive: Alpha: 1 Show Axes: false Show Trail: false Value: 
true - RH_HFE: + RH_HFE_drive: Alpha: 1 Show Axes: false Show Trail: false Value: true + RH_HFE_output: + Alpha: 1 + Show Axes: false + Show Trail: false RH_HIP: Alpha: 1 Show Axes: false Show Trail: false - RH_KFE: + RH_KFE_drive: Alpha: 1 Show Axes: false Show Trail: false @@ -307,11 +322,6 @@ Visualization Manager: Show Axes: false Show Trail: false Value: true - alphasense_mesh: - Alpha: 1 - Show Axes: false - Show Trail: false - Value: true base: Alpha: 1 Show Axes: false @@ -335,74 +345,43 @@ Visualization Manager: Show Axes: false Show Trail: false Value: true - cam0_sensor_frame: - Alpha: 1 - Show Axes: false - Show Trail: false - cam1_sensor_frame: - Alpha: 1 - Show Axes: false - Show Trail: false - cam2_sensor_frame: - Alpha: 1 - Show Axes: false - Show Trail: false - cam3_sensor_frame: - Alpha: 1 - Show Axes: false - Show Trail: false - cam3_sensor_frame_helper: + depth_camera_front_lower_camera: Alpha: 1 Show Axes: false Show Trail: false - cam4_sensor_frame: + depth_camera_front_lower_camera_parent: Alpha: 1 Show Axes: false Show Trail: false - cam4_sensor_frame_helper: + depth_camera_front_upper_camera: Alpha: 1 Show Axes: false Show Trail: false - cam5_sensor_frame: + depth_camera_front_upper_camera_parent: Alpha: 1 Show Axes: false Show Trail: false - cam5_sensor_frame_helper: - Alpha: 1 - Show Axes: false - Show Trail: false - cam6_sensor_frame: - Alpha: 1 - Show Axes: false - Show Trail: false - cam6_sensor_frame_helper: - Alpha: 1 - Show Axes: false - Show Trail: false - depth_camera_front_camera: + depth_camera_left_camera: Alpha: 1 Show Axes: false Show Trail: false - Value: true - depth_camera_front_camera_parent: + depth_camera_left_camera_parent: Alpha: 1 Show Axes: false Show Trail: false - depth_camera_left_camera: + depth_camera_rear_lower_camera: Alpha: 1 Show Axes: false Show Trail: false - Value: true - depth_camera_left_camera_parent: + depth_camera_rear_lower_camera_parent: Alpha: 1 Show Axes: false Show Trail: false - depth_camera_rear_camera: + depth_camera_rear_upper_camera: Alpha: 1 Show Axes: false Show Trail: false - Value: true - depth_camera_rear_camera_parent: + depth_camera_rear_upper_camera_parent: Alpha: 1 Show Axes: false Show Trail: false @@ -410,68 +389,55 @@ Visualization Manager: Alpha: 1 Show Axes: false Show Trail: false - Value: true depth_camera_right_camera_parent: Alpha: 1 Show Axes: false Show Trail: false - docking_hatch_cover: + docking_socket: Alpha: 1 Show Axes: false Show Trail: false + Value: true face_front: Alpha: 1 Show Axes: false Show Trail: false - Value: true face_rear: Alpha: 1 Show Axes: false Show Trail: false - Value: true - front_handle: + face_shell_front: Alpha: 1 Show Axes: false Show Trail: false Value: true - handle: + face_shell_rear: Alpha: 1 Show Axes: false Show Trail: false Value: true - hatch: + hatch_shell: Alpha: 1 Show Axes: false Show Trail: false Value: true - imu_link: + hbc_receiver: Alpha: 1 Show Axes: false Show Trail: false - imu_sensor_frame: - Alpha: 1 - Show Axes: false - Show Trail: false - interface_hatch: + imu_link: Alpha: 1 Show Axes: false Show Trail: false - Value: true lidar: Alpha: 1 Show Axes: false Show Trail: false Value: true - perception_head_cage: - Alpha: 1 - Show Axes: false - Show Trail: false - Value: true - remote: + lidar_parent: Alpha: 1 Show Axes: false Show Trail: false - Value: true top_shell: Alpha: 1 Show Axes: false @@ -481,7 +447,6 @@ Visualization Manager: Alpha: 1 Show Axes: false Show Trail: false - Value: true wide_angle_camera_front_camera_parent: Alpha: 1 
Show Axes: false @@ -490,7 +455,6 @@ Visualization Manager: Alpha: 1 Show Axes: false Show Trail: false - Value: true wide_angle_camera_rear_camera_parent: Alpha: 1 Show Axes: false @@ -508,13 +472,13 @@ Visualization Manager: - Class: rviz/Group Displays: - Class: rviz/Marker - Enabled: true + Enabled: false Marker Topic: /wild_visual_navigation_node/graph_footprints Name: Footprint Namespaces: {} Queue Size: 100 - Value: true + Value: false - Alpha: 1 Buffer Length: 1 Class: rviz/Path @@ -525,7 +489,7 @@ Visualization Manager: Length: 0.30000001192092896 Line Style: Billboards Line Width: 0.029999999329447746 - Name: Proprio Graph + Name: Supervision Graph Offset: X: 0 Y: 0 @@ -536,7 +500,7 @@ Visualization Manager: Radius: 0.029999999329447746 Shaft Diameter: 0.10000000149011612 Shaft Length: 0.10000000149011612 - Topic: /wild_visual_navigation_node/proprioceptive_graph + Topic: /wild_visual_navigation_node/supervision_graph Unreliable: false Value: true Enabled: true @@ -631,42 +595,6 @@ Visualization Manager: Name: Self Supervision (Debug) - Class: rviz/Group Displays: - - Class: rviz/Image - Enabled: true - Image Topic: /alphasense_driver_ros/cam4/debayered - Max Value: 1 - Median window: 5 - Min Value: 0 - Name: AS Front (cam4) - Normalize Range: true - Queue Size: 2 - Transport Hint: raw - Unreliable: false - Value: true - - Class: rviz/Image - Enabled: false - Image Topic: /alphasense_driver_ros/cam3/debayered - Max Value: 1 - Median window: 5 - Min Value: 0 - Name: AS Left (cam3) - Normalize Range: true - Queue Size: 2 - Transport Hint: raw - Unreliable: false - Value: false - - Class: rviz/Image - Enabled: false - Image Topic: /alphasense_driver_ros/cam5/debayered - Max Value: 1 - Median window: 5 - Min Value: 0 - Name: AS Right (cam5) - Normalize Range: true - Queue Size: 2 - Transport Hint: raw - Unreliable: false - Value: false - Class: rviz/Image Enabled: false Image Topic: /wide_angle_camera_front/image_color @@ -774,7 +702,7 @@ Visualization Manager: Size (Pixels): 3 Size (m): 0.009999999776482582 Style: Squares - Topic: /depth_camera_front/point_cloud_self_filtered + Topic: /depth_camera_front_upper/point_cloud_self_filtered Unreliable: false Use Fixed Frame: true Use rainbow: true @@ -866,8 +794,8 @@ Visualization Manager: - Alpha: 1 Autocompute Intensity Bounds: true Autocompute Value Bounds: - Max Value: 12.096750259399414 - Min Value: -2.868344783782959 + Max Value: 15.884197235107422 + Min Value: -2.124349355697632 Value: true Axis: Z Channel Name: intensity @@ -875,7 +803,7 @@ Visualization Manager: Color: 239; 41; 41 Color Transformer: AxisColor Decay Time: 0 - Enabled: true + Enabled: false Invert Rainbow: false Max Color: 255; 255; 255 Min Color: 0; 0; 0 @@ -890,7 +818,7 @@ Visualization Manager: Unreliable: false Use Fixed Frame: true Use rainbow: true - Value: true + Value: false Enabled: true Name: Depth Sensors - Class: rviz/Group @@ -908,7 +836,7 @@ Visualization Manager: Unreliable: false Value: true - Class: rviz/Image - Enabled: false + Enabled: true Image Topic: /wild_visual_navigation_node/front/traversability Max Value: 1 Median window: 5 @@ -918,38 +846,74 @@ Visualization Manager: Queue Size: 2 Transport Hint: raw Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_overlay + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Learning Mask + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + 
Enabled: false + Image Topic: /wild_visual_navigation_visu_traversability/traversability_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Traversability Overlay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false Value: false - Class: rviz/Image Enabled: false - Image Topic: /wild_visual_navigation_node/front/confidence + Image Topic: /wild_visual_navigation_visu_0/traversability_overlayed Max Value: 1 Median window: 5 Min Value: 0 - Name: Confidence Raw + Name: F Traversability Overlay Replay Normalize Range: true Queue Size: 2 Transport Hint: raw Unreliable: false Value: false - Class: rviz/Image - Enabled: true - Image Topic: /wild_visual_navigation_visu_traversability/traversability_overlayed + Enabled: false + Image Topic: /wild_visual_navigation_visu_confidence/confidence_overlayed Max Value: 1 Median window: 5 Min Value: 0 - Name: Traversability Overlay + Name: F Confidence Overlay Normalize Range: true Queue Size: 2 Transport Hint: raw Unreliable: false - Value: true + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_visu_1/confidence_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Confidence Overlay Replay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false - Class: rviz/Image Enabled: true - Image Topic: /wild_visual_navigation_visu_confidence/confidence_overlayed + Image Topic: /wild_visual_navigation_visu_0/traversability_overlayed Max Value: 1 Median window: 5 Min Value: 0 - Name: Confidence Overlay + Name: Traversability Overlay Normalize Range: true Queue Size: 2 Transport Hint: raw @@ -964,7 +928,7 @@ Visualization Manager: Marker Topic: /field_local_planner/real_carrot Name: Goal (carrot) Namespaces: - carrot: true + {} Queue Size: 100 Value: true - Alpha: 1 @@ -1024,6 +988,82 @@ Visualization Manager: Value: false Enabled: true Name: Local planner + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/left/image_input + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: L Input Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/left/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: L Traversability Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/left/confidence + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: L Confidence Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/right/image_input + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Input Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/right/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Traversability Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/right/confidence + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Confidence Raw + Normalize Range: true + Queue Size: 2 
+ Transport Hint: raw + Unreliable: false + Value: true + Enabled: false + Name: LR Prediction Enabled: true Name: Wild Visual Navigation - Class: rviz/Group @@ -1172,7 +1212,7 @@ Visualization Manager: Unreliable: false Use ColorMap: true Value: false - Enabled: true + Enabled: false Name: Elevation Map WIFI - Class: rviz/Group Displays: @@ -1303,13 +1343,13 @@ Visualization Manager: Color Layer: visual_traversability Color Transformer: "" ColorMap: coolwarm - Enabled: false + Enabled: true Grid Cell Decimation: 1 Grid Line Thickness: 0.10000000149011612 - Height Layer: visual_traversability + Height Layer: elevation_semantic Height Transformer: GridMapLayer History Length: 1 - Invert ColorMap: true + Invert ColorMap: false Max Color: 255; 255; 255 Max Intensity: 10 Min Color: 0; 0; 0 @@ -1319,7 +1359,7 @@ Visualization Manager: Topic: /elevation_mapping/semantic_map_raw Unreliable: false Use ColorMap: true - Value: false + Value: true - Alpha: 1 Autocompute Intensity Bounds: true Class: grid_map_rviz_plugin/GridMap @@ -1374,7 +1414,7 @@ Visualization Manager: Views: Current: Class: rviz/ThirdPersonFollower - Distance: 17.14068031311035 + Distance: 12.582637786865234 Enable Stereo Rendering: Stereo Eye Separation: 0.05999999865889549 Stereo Focal Distance: 1 @@ -1382,41 +1422,53 @@ Visualization Manager: Value: false Field of View: 0.7853981852531433 Focal Point: - X: 3.1985225677490234 - Y: 1.6920582056045532 - Z: -5.593326568603516 + X: 1.0811700820922852 + Y: -0.22880220413208008 + Z: -5.593325614929199 Focal Shape Fixed Size: false Focal Shape Size: 0.05000000074505806 Invert Z Axis: false Name: Current View Near Clip Distance: 0.009999999776482582 - Pitch: 1.5297969579696655 + Pitch: 0.8147973418235779 Target Frame: base - Yaw: 0.04853395000100136 + Yaw: 3.7585225105285645 Saved: ~ Window Geometry: AB Wide Angle Front: collapsed: false - AS Front (cam4): - collapsed: false - AS Left (cam3): + Displays: collapsed: false - AS Right (cam5): + F Confidence Overlay: collapsed: false - Confidence Overlay: + F Confidence Overlay Replay: collapsed: false - Confidence Raw: + F Traversability Overlay: collapsed: false - Displays: + F Traversability Overlay Replay: collapsed: false - Height: 1016 + Height: 1376 Hide Left Dock: false Hide Right Dock: true Input Image: collapsed: false + L Confidence Raw: + collapsed: false + L Input Image: + collapsed: false + L Traversability Raw: + collapsed: false + Learning Mask: + collapsed: false Placeholder: collapsed: false - QMainWindow State: 
000000ff00000000fd0000000400000000000001560000025ffc020000000dfb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000005fb000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c006100790073010000017c0000025f000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d00650072006100000002ea000000c80000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640000000259000000520000000000000000fb00000024005300750070006500720076006900730069006f006e0020002d0020004d00610073006b000000027e000000b80000000000000000fb0000000a0049006d00610067006500000002eb000001280000000000000000fb000000140043006f006e0066006900640065006e0063006500000003b3000000c60000000000000000000000010000010f0000025ffc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000017c0000025f000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b20000000000000000000000020000073800000139fc010000000bfb0000001c0049006d00610067006500200046006500610074007500720065007300000000bf000003690000000000000000fc0000000000000268000000c200fffffffa000000020200000003fb00000026004100420020005700690064006500200041006e0067006c0065002000460072006f006e00740100000000ffffffff0000001600fffffffb0000002400460072006f006e0074002000280063006f006d007000720065007300730065006400290000000000ffffffff0000000000000000fb0000001e00410053002000460072006f006e00740020002800630061006d00340029010000003d0000010c0000001600fffffffb000000160049006e00700075007400200049006d006100670065010000026e000001b00000008600fffffffb0000000a0049006d00610067006501000003240000018a0000000000000000fc0000042400000194000000c800fffffffa000000000200000002fb0000002c0054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790100000000ffffffff0000001600fffffffb000000240054007200610076006500720073006100620069006c0069007400790020005200610077010000003d0000012b0000001600fffffffc000005be0000017a000000bb00fffffffa000000000200000002fb000000240043006f006e0066006900640065006e006300650020004f007600650072006c006100790100000000ffffffff0000001600fffffffb0000001c0043006f006e0066006900640065006e006300650020005200610077010000003d0000012b0000001600fffffffb000000160050006c0061006300650068006f006c0064006500720000000492000002a60000008500fffffffb000000160049006e00700075007400200049006d006100670065010000031e000001790000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640100000659000001af0000000000000000fb00000024005300750070006500720076006900730069006f006e0020002d0020004d00610073006b010000080e000001aa0000000000000000fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000073800000061fc010000000dfb000000200052006500700072006f006a006500630074006500640020005000610074006800000000000000011b000000ab00fffffffb00000024005300750070006500720076006900730069006f006e0020005300690067006e0061006c00000001210000011b000000b000f
ffffffb0000001c0054007200610076006500720073006100620069006c00690074007900000002420000011d0000009100fffffffb000000160055006e006300650072007400610069006e0074007900000003650000011b0000008500fffffffb000000160050006c0061006300650068006f006c0064006500720000000000000007380000008500fffffffb0000001c0054007200610076006500720073006100620069006c006900740079010000031f0000017d0000000000000000fb000000160055006e006300650072007400610069006e0074007901000004a2000002960000000000000000fb000000140043006f006e0066006900640065006e006300650000000000000003d40000000000000000fb0000001c0054007200610076006500720073006100620069006c0069007400790000000231000001f20000000000000000fb0000001c004100530020004c0065006600740020002800630061006d003300290000000000000007380000009700fffffffb0000001e004100530020005200690067006800740020002800630061006d0035002900000003f000000348000000a000fffffffb0000000800540069006d0065000000000000000780000002eb00fffffffb0000000800540069006d00650100000000000004500000000000000000000005dc0000025f00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + QMainWindow State: 000000ff00000000fd00000004000000000000026c000002eafc0200000013fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000005fb000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c0061007900730100000259000002ea000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d00650072006100000002ea000000c80000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640000000259000000520000000000000000fb00000024005300750070006500720076006900730069006f006e0020002d0020004d00610073006b000000027e000000b80000000000000000fb0000000a0049006d00610067006500000002eb000001280000000000000000fb000000140043006f006e0066006900640065006e0063006500000003b3000000c60000000000000000fb00000028004600200043006f006e0066006900640065006e006300650020004f007600650072006c0061007900000002be000000160000001600fffffffb00000030004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c0061007900000005290000001a0000001600fffffffb00000036004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790020005200650070006c0061007900000002fc0000001e0000001600fffffffb0000003e004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790020005200650070006c0061007900000004a40000009f0000001600fffffffb00000028004600200054007200610076006500720073006100620069006c00690074007900200052006100770000000482000000c10000000000000000fb00000028004600200054007200610076006500720073006100620069006c00690074007900200052006100770000000482000000c10000000000000000000000010000010f0000025ffc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a00560069006500770073000000017c0000025f000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b2000000000000000000000002000009b800000216fc010000000cfb0000001c0049
006d00610067006500200046006500610074007500720065007300000000bf000003690000000000000000fc00000000000001af0000000000fffffffa000000000200000004fb0000000a0049006d0061006700650000000000ffffffff0000000000000000fb00000026004100420020005700690064006500200041006e0067006c0065002000460072006f006e00740000000000ffffffff0000001600fffffffb0000002400460072006f006e0074002000280063006f006d007000720065007300730065006400290000000000ffffffff0000000000000000fb0000001e00410053002000460072006f006e00740020002800630061006d00340029000000003d0000010c0000000000000000fc00000000000002c30000008600fffffffa000000000200000003fb000000160049006e00700075007400200049006d006100670065010000003d000001ac0000001600fffffffb0000001a005200200049006e00700075007400200049006d0061006700650000000000ffffffff0000001600fffffffb0000001a004c00200049006e00700075007400200049006d0061006700650000000000ffffffff0000001600fffffffb0000000a0049006d00610067006501000003240000018a0000000000000000fb0000002c0054007200610076006500720073006100620069006c0069007400790020004f007600650072006c0061007901000002c9000001ea000000c800fffffffc000004b90000026e000000b000fffffffa000000020200000006fb000000160046002000540072006100760020004f0076006500720000000000ffffffff0000000000000000fb0000003e004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790020005200650070006c006100790100000000ffffffff0000000000000000fb000000240054007200610076006500720073006100620069006c0069007400790020005200610077010000003d0000012b0000001600fffffffb00000028005200200054007200610076006500720073006100620069006c00690074007900200052006100770000000000ffffffff0000001600fffffffb00000028004c00200054007200610076006500720073006100620069006c00690074007900200052006100770000000000ffffffff0000001600fffffffb00000030004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790000000000ffffffff0000000000000000fc0000072d0000028b0000009700fffffffa000000010200000005fb00000036004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790020005200650070006c006100790100000000ffffffff0000000000000000fb0000001a004c006500610072006e0069006e00670020004d00610073006b010000003d0000012b0000001600fffffffb00000020005200200043006f006e0066006900640065006e0063006500200052006100770000000000ffffffff0000001600fffffffb00000020004c00200043006f006e0066006900640065006e0063006500200052006100770000000000ffffffff0000001600fffffffb00000028004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790000000000ffffffff0000000000000000fb000000160050006c0061006300650068006f006c006400650072000000081e0000019a0000008500fffffffb000000160049006e00700075007400200049006d006100670065010000031e000001790000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640100000659000001af0000000000000000fb00000024005300750070006500720076006900730069006f006e0020002d0020004d00610073006b010000080e000001aa0000000000000000fb0000000a00560069006500770073030000004e00000080000002e10000019700000003000009b800000061fc010000000dfb000000200052006500700072006f006a00650063007400650064002000500061007400680000000000000000ab000000ab00fffffffb00000024005300750070006500720076006900730069006f006e0020005300690067006e0061006c00000000b100000797000000b000fffffffb0000001c0054007200610076006500720073006100620069006c006900740079000000084e000000dd0000009100fffffffb000000160055006e006300650072007400610069006e007400790000000931000000870000008500fffffffb000000160050006c0061006300650068006f006c006400650072000000000000000
9b80000008500fffffffb0000001c0054007200610076006500720073006100620069006c006900740079010000031f0000017d0000000000000000fb000000160055006e006300650072007400610069006e0074007901000004a2000002960000000000000000fb000000140043006f006e0066006900640065006e006300650000000000000003d40000000000000000fb0000001c0054007200610076006500720073006100620069006c0069007400790000000231000001f20000000000000000fb0000001c004100530020004c0065006600740020002800630061006d003300290000000000000007380000000000000000fb0000001e004100530020005200690067006800740020002800630061006d0035002900000003f0000003480000000000000000fb0000000800540069006d00650000000000000007800000041800fffffffb0000000800540069006d0065010000000000000450000000000000000000000746000002ea00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + R Confidence Raw: + collapsed: false + R Input Image: + collapsed: false + R Traversability Raw: + collapsed: false Reprojected Path: collapsed: false Selection: @@ -1437,6 +1489,6 @@ Window Geometry: collapsed: false Views: collapsed: true - Width: 1848 + Width: 2488 X: 72 Y: 27 diff --git a/wild_visual_navigation_ros/config/rviz/anymal_rerun_wvn.rviz b/wild_visual_navigation_anymal/config/rviz/deprecated/anymal_rerun_wvn.rviz similarity index 99% rename from wild_visual_navigation_ros/config/rviz/anymal_rerun_wvn.rviz rename to wild_visual_navigation_anymal/config/rviz/deprecated/anymal_rerun_wvn.rviz index 662f7156..7afa2bab 100644 --- a/wild_visual_navigation_ros/config/rviz/anymal_rerun_wvn.rviz +++ b/wild_visual_navigation_anymal/config/rviz/deprecated/anymal_rerun_wvn.rviz @@ -525,7 +525,7 @@ Visualization Manager: Length: 0.30000001192092896 Line Style: Billboards Line Width: 0.029999999329447746 - Name: Proprio Graph + Name: Supervision Graph Offset: X: 0 Y: 0 @@ -536,7 +536,7 @@ Visualization Manager: Radius: 0.029999999329447746 Shaft Diameter: 0.10000000149011612 Shaft Length: 0.10000000149011612 - Topic: /wild_visual_navigation_node/proprioceptive_graph + Topic: /wild_visual_navigation_node/supervision_graph Unreliable: false Value: true Enabled: true diff --git a/wild_visual_navigation_ros/config/rviz/gps_trajectory.rviz b/wild_visual_navigation_anymal/config/rviz/deprecated/gps_trajectory.rviz similarity index 100% rename from wild_visual_navigation_ros/config/rviz/gps_trajectory.rviz rename to wild_visual_navigation_anymal/config/rviz/deprecated/gps_trajectory.rviz diff --git a/wild_visual_navigation_ros/config/rviz/video.rviz b/wild_visual_navigation_anymal/config/rviz/deprecated/video.rviz similarity index 99% rename from wild_visual_navigation_ros/config/rviz/video.rviz rename to wild_visual_navigation_anymal/config/rviz/deprecated/video.rviz index b0050252..19b5b88f 100644 --- a/wild_visual_navigation_ros/config/rviz/video.rviz +++ b/wild_visual_navigation_anymal/config/rviz/deprecated/video.rviz @@ -486,7 +486,7 @@ Visualization Manager: Length: 0.30000001192092896 Line Style: Billboards Line Width: 0.029999999329447746 - Name: Proprio Graph + Name: Supervision Graph Offset: X: 0 Y: 0 @@ -497,7 +497,7 @@ Visualization Manager: Radius: 0.029999999329447746 Shaft Diameter: 0.10000000149011612 Shaft Length: 0.10000000149011612 - Topic: /wild_visual_navigation_node/proprioceptive_graph + Topic: /wild_visual_navigation_node/supervision_graph Unreliable: false Value: true Enabled: true diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense.yaml 
b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense.yaml new file mode 100644 index 00000000..82ab4cb2 --- /dev/null +++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense.yaml @@ -0,0 +1,26 @@ +camera_topics: + front: + image_topic: "/alphasense_driver_ros/cam4/debayered" + info_topic: "/alphasense_driver_ros/cam4/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + left: + image_topic: "/alphasense_driver_ros/cam3/debayered" + info_topic: "/alphasense_driver_ros/cam3/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + right: + image_topic: "/alphasense_driver_ros/cam5/debayered" + info_topic: "/alphasense_driver_ros/cam5/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 224 # 448 +network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense_compressed.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense_compressed.yaml new file mode 100644 index 00000000..3017f436 --- /dev/null +++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense_compressed.yaml @@ -0,0 +1,26 @@ +camera_topics: + front: + image_topic: "/alphasense_driver_ros/cam4/color_rect/image/compressed" + info_topic: "/alphasense_driver_ros/cam4/color_rect/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + left: + image_topic: "/alphasense_driver_ros/cam3/color_rect/image/compressed" + info_topic: "/alphasense_driver_ros/cam3/color_rect/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + right: + image_topic: "/alphasense_driver_ros/cam5/color_rect/image/compressed" + info_topic: "/alphasense_driver_ros/cam5/color_rect/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 224 # 448 +network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense_compressed_front.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense_compressed_front.yaml new file mode 100644 index 00000000..6e284945 --- /dev/null +++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense_compressed_front.yaml @@ -0,0 +1,12 @@ +camera_topics: + front: + image_topic: "/alphasense_driver_ros/cam4/color_rect/image/compressed" + info_topic: "/alphasense_driver_ros/cam4/color_rect/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 224 # 448 +network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense_resize.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense_resize.yaml new file mode 100644 index 00000000..bd7d2c61 --- /dev/null +++ 
b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/alphasense_resize.yaml @@ -0,0 +1,26 @@ +camera_topics: + front: + image_topic: "/alphasense_driver_ros/cam4/color_rect_resize/image" + info_topic: "/alphasense_driver_ros/cam4/color_rect_resize/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + left: + image_topic: "/alphasense_driver_ros/cam3/color_rect_resize/image" + info_topic: "/alphasense_driver_ros/cam3/color_rect_resize/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + right: + image_topic: "/alphasense_driver_ros/cam5/color_rect_resize/image" + info_topic: "/alphasense_driver_ros/cam5/color_rect_resize/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 224 # 448 +network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/hdr.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/hdr.yaml new file mode 100644 index 00000000..90c26af9 --- /dev/null +++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/hdr.yaml @@ -0,0 +1,12 @@ +camera_topics: + # Because this is the anybotics rear + rear: + image_topic: "/hdr_camera/image_raw" + info_topic: "/hdr_camera/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 224 # 448 +network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/none.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/none.yaml similarity index 100% rename from wild_visual_navigation_ros/config/wild_visual_navigation/inputs/none.yaml rename to wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/none.yaml diff --git a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/realsense_front.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/realsense_front.yaml similarity index 56% rename from wild_visual_navigation_ros/config/wild_visual_navigation/inputs/realsense_front.yaml rename to wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/realsense_front.yaml index 10d880e5..c98d812d 100644 --- a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/realsense_front.yaml +++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/realsense_front.yaml @@ -3,7 +3,10 @@ camera_topics: image_topic: "/depth_camera_front/color/image_raw" info_topic: "/depth_camera_front/color/camera_info" use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 # Provides 480 (height) x 848 (width) images -network_input_image_height: 224 # 448 -network_input_image_width: 224 # 448 \ No newline at end of file +network_input_image_height: 448 # 448 +network_input_image_width: 448 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/realsense_rear.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/realsense_rear.yaml similarity index 77% rename from wild_visual_navigation_ros/config/wild_visual_navigation/inputs/realsense_rear.yaml rename to 
wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/realsense_rear.yaml index 074f5116..1cb0ea2c 100644 --- a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/realsense_rear.yaml +++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/realsense_rear.yaml @@ -3,6 +3,9 @@ camera_topics: image_topic: "/depth_camera_rear/color/image_raw" info_topic: "/depth_camera_rear/color/camera_info" use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 # Provides 480 (height) x 848 (width) images network_input_image_height: 224 # 448 diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_dual.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_dual.yaml new file mode 100644 index 00000000..54235d12 --- /dev/null +++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_dual.yaml @@ -0,0 +1,18 @@ +camera_topics: + front: + image_topic: "/wide_angle_camera_front/image_color_rect" + info_topic: "/wide_angle_camera_front/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + rear: + image_topic: "/wide_angle_camera_rear/image_color_rect" + info_topic: "/wide_angle_camera_rear/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 224 # 448 +network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_dual_resize.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_dual_resize.yaml new file mode 100644 index 00000000..b119c22d --- /dev/null +++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_dual_resize.yaml @@ -0,0 +1,18 @@ +camera_topics: + front: + image_topic: "/wide_angle_camera_front/image_color_rect_resize" + info_topic: "/wide_angle_camera_front_resize/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + rear: + image_topic: "/wide_angle_camera_rear/image_color_rect_resize" + info_topic: "/wide_angle_camera_rear_resize/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 224 # 448 +network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_front.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_front.yaml new file mode 100644 index 00000000..a8b8ca62 --- /dev/null +++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_front.yaml @@ -0,0 +1,12 @@ +camera_topics: + rear: + image_topic: "/wide_angle_camera_rear/image_color" + info_topic: "/wide_angle_camera_rear/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 448 # 448 +network_input_image_width: 448 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_front_compressed.yaml 
b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_front_compressed.yaml
new file mode 100644
index 00000000..41ac9801
--- /dev/null
+++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_front_compressed.yaml
@@ -0,0 +1,12 @@
+camera_topics:
+  front:
+    image_topic: "/wide_angle_camera_rear/image_color_rect/compressed"
+    info_topic: "/wide_angle_camera_rear/camera_info"
+    use_for_training: true
+    publish_confidence: true
+    publish_input_image: true
+    scheduler_weight: 1
+
+# Provides 1080 (height) x 1920 (width) images
+network_input_image_height: 224 # 448
+network_input_image_width: 224 # 448
\ No newline at end of file
diff --git a/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_front_resize.yaml b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_front_resize.yaml
new file mode 100644
index 00000000..c3d8ae6b
--- /dev/null
+++ b/wild_visual_navigation_anymal/config/wild_visual_navigation/inputs/wide_angle_front_resize.yaml
@@ -0,0 +1,12 @@
+camera_topics:
+  rear:
+    image_topic: "/wide_angle_camera_rear/image_color_resize"
+    info_topic: "/wide_angle_camera_rear_resize/camera_info"
+    use_for_training: true
+    publish_confidence: true
+    publish_input_image: true
+    scheduler_weight: 1
+
+# Provides 1080 (height) x 1920 (width) images
+network_input_image_height: 224 # 448
+network_input_image_width: 224 # 448
\ No newline at end of file
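The camera input files added above all share one schema: a camera_topics map whose entries each set image_topic, info_topic, use_for_training, publish_confidence, publish_input_image and scheduler_weight, plus a global network_input_image_height / network_input_image_width. As an editorial illustration only (this is not code from the repository; CameraConfig, load_camera_config and the file path are hypothetical names), a minimal Python sketch of how such a file could be parsed:

# Editorial sketch, not part of the repository: parse a camera-input YAML of the schema shown above.
# Requires PyYAML.
from dataclasses import dataclass
from typing import Dict, Tuple
import yaml


@dataclass
class CameraConfig:
    image_topic: str
    info_topic: str
    use_for_training: bool
    publish_confidence: bool
    publish_input_image: bool
    scheduler_weight: int


def load_camera_config(path: str) -> Tuple[Dict[str, CameraConfig], Tuple[int, int]]:
    """Return {camera_name: CameraConfig} and the (height, width) network input size."""
    with open(path, "r") as f:
        cfg = yaml.safe_load(f)
    cameras = {name: CameraConfig(**params) for name, params in cfg["camera_topics"].items()}
    return cameras, (cfg["network_input_image_height"], cfg["network_input_image_width"])


if __name__ == "__main__":
    cameras, (height, width) = load_camera_config("wide_angle_front_resize.yaml")  # illustrative path
    for name, cam in cameras.items():
        print(f"{name}: {cam.image_topic} (training={cam.use_for_training}), network input {height}x{width}")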
diff --git a/wild_visual_navigation_anymal/launch/deprecated/convert_to_public_release.launch b/wild_visual_navigation_anymal/launch/deprecated/convert_to_public_release.launch
new file mode 100644
index 00000000..0424ed7c
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/deprecated/convert_to_public_release.launch
@@ -0,0 +1,39 @@
[launch XML content (39 added lines) was lost during text extraction and is not recoverable]
\ No newline at end of file
diff --git a/wild_visual_navigation_anymal/launch/deprecated/raw_image_pipeline_perugia.launch b/wild_visual_navigation_anymal/launch/deprecated/raw_image_pipeline_perugia.launch
new file mode 100644
index 00000000..fc44183d
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/deprecated/raw_image_pipeline_perugia.launch
@@ -0,0 +1,287 @@
[launch XML content (287 added lines) was lost during text extraction; only repeated image_transport plugin blacklist entries ('image_transport/compressedDepth', 'image_transport/theora') survived]
diff --git a/wild_visual_navigation_ros/launch/replay_launch.launch b/wild_visual_navigation_anymal/launch/deprecated/replay_launch.launch
similarity index 78%
rename from wild_visual_navigation_ros/launch/replay_launch.launch
rename to wild_visual_navigation_anymal/launch/deprecated/replay_launch.launch
index c2ee5612..12c12f93 100644
--- a/wild_visual_navigation_ros/launch/replay_launch.launch
+++ b/wild_visual_navigation_anymal/launch/deprecated/replay_launch.launch
@@ -2,18 +2,21 @@
@@ -26,7 +29,7 @@
@@ -72,4 +75,9 @@
[the XML content of these three hunks was lost during text extraction and is not recoverable]
\ No newline at end of file
diff --git a/wild_visual_navigation_ros/launch/shift_gridmaps.launch b/wild_visual_navigation_anymal/launch/deprecated/shift_gridmaps.launch
similarity index 100%
rename from wild_visual_navigation_ros/launch/shift_gridmaps.launch
rename to wild_visual_navigation_anymal/launch/deprecated/shift_gridmaps.launch
diff --git a/wild_visual_navigation_ros/launch/debug/trajectory_base_map_new.launch b/wild_visual_navigation_anymal/launch/deprecated/trajectory_base_map_new.launch
similarity index 100%
rename from wild_visual_navigation_ros/launch/debug/trajectory_base_map_new.launch
rename to wild_visual_navigation_anymal/launch/deprecated/trajectory_base_map_new.launch
diff --git a/wild_visual_navigation_ros/launch/debug/trajectory_server.launch b/wild_visual_navigation_anymal/launch/deprecated/trajectory_server.launch
similarity index 100%
rename from wild_visual_navigation_ros/launch/debug/trajectory_server.launch
rename to wild_visual_navigation_anymal/launch/deprecated/trajectory_server.launch
diff --git a/wild_visual_navigation_anymal/launch/elevation_mapping_cupy.launch b/wild_visual_navigation_anymal/launch/elevation_mapping_cupy.launch
new file mode 100644
index 00000000..7f222389
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/elevation_mapping_cupy.launch
@@ -0,0 +1,9 @@
[launch XML content (9 added lines) was lost during text extraction and is not recoverable]
\ No newline at end of file
diff --git a/wild_visual_navigation_ros/launch/field_local_planner.launch b/wild_visual_navigation_anymal/launch/field_local_planner.launch
similarity index 100%
rename from wild_visual_navigation_ros/launch/field_local_planner.launch
rename to wild_visual_navigation_anymal/launch/field_local_planner.launch
diff --git a/wild_visual_navigation_anymal/launch/overlay_images.launch b/wild_visual_navigation_anymal/launch/overlay_images.launch
new file mode 100644
index 00000000..bf323deb
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/overlay_images.launch
@@ -0,0 +1,28 @@
[launch XML content (28 added lines) was lost during text extraction and is not recoverable]
\ No newline at end of file
diff --git a/wild_visual_navigation_anymal/launch/resize_hdr.launch b/wild_visual_navigation_anymal/launch/resize_hdr.launch
new file mode 100644
index 00000000..dff77a73
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/resize_hdr.launch
@@ -0,0 +1,12 @@
[launch XML content (12 added lines) was lost during text extraction and is not recoverable]
\ No newline at end of file
diff --git a/wild_visual_navigation_anymal/launch/resize_images_alphasense.launch b/wild_visual_navigation_anymal/launch/resize_images_alphasense.launch
new file mode 100644
index 00000000..c51df980
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/resize_images_alphasense.launch
@@ -0,0 +1,55 @@
[launch XML content (55 added lines) was lost during text extraction and is not recoverable]
diff --git a/wild_visual_navigation_anymal/launch/resize_images_wide_angle_dual.launch b/wild_visual_navigation_anymal/launch/resize_images_wide_angle_dual.launch
new file mode 100644
index 00000000..7a3503c0
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/resize_images_wide_angle_dual.launch
@@ -0,0 +1,4 @@
[launch XML content (4 added lines) was lost during text extraction and is not recoverable]
\ No newline at end of file
diff --git a/wild_visual_navigation_anymal/launch/resize_images_wide_angle_front.launch b/wild_visual_navigation_anymal/launch/resize_images_wide_angle_front.launch
new file mode 100644
index 00000000..b772ecfc
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/resize_images_wide_angle_front.launch
@@ -0,0 +1,12 @@
[launch XML content (12 added lines) was lost during text extraction and is not recoverable]
\ No newline at end of file
diff --git a/wild_visual_navigation_anymal/launch/resize_images_wide_angle_rear.launch b/wild_visual_navigation_anymal/launch/resize_images_wide_angle_rear.launch
new file mode 100644
index 00000000..8927054e
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/resize_images_wide_angle_rear.launch
@@ -0,0 +1,12 @@
[launch XML content (12 added lines) was lost during text extraction and is not recoverable]
\ No newline at end of file
diff --git a/wild_visual_navigation_anymal/launch/robot_jetson.launch b/wild_visual_navigation_anymal/launch/robot_jetson.launch
new file mode 100644
index 00000000..8f152d76
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/robot_jetson.launch
@@ -0,0 +1,16 @@
[launch XML content (16 added lines) was lost during text extraction and is not recoverable]
diff --git a/wild_visual_navigation_anymal/launch/robot_npc.launch b/wild_visual_navigation_anymal/launch/robot_npc.launch
new file mode 100644
index 00000000..7007dedf
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/robot_npc.launch
@@ -0,0 +1,10 @@
[launch XML content (10 added lines) was lost during text extraction and is not recoverable]
diff --git a/wild_visual_navigation_anymal/launch/robot_replay.launch b/wild_visual_navigation_anymal/launch/robot_replay.launch
new file mode 100644
index 00000000..394bcab7
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/robot_replay.launch
@@ -0,0 +1,21 @@
[launch XML content (21 added lines) was lost during text extraction and is not recoverable]
diff --git a/wild_visual_navigation_anymal/launch/view.launch b/wild_visual_navigation_anymal/launch/view.launch
new file mode 100644
index 00000000..214b6d15
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/view.launch
@@ -0,0 +1,7 @@
[launch XML content (7 added lines) was lost during text extraction and is not recoverable]
diff --git a/wild_visual_navigation_anymal/launch/wild_visual_navigation.launch b/wild_visual_navigation_anymal/launch/wild_visual_navigation.launch
new file mode 100644
index 00000000..26831988
--- /dev/null
+++ b/wild_visual_navigation_anymal/launch/wild_visual_navigation.launch
@@ -0,0 +1,27 @@
[launch XML content (27 added lines) was lost during text extraction and is not recoverable]
diff --git a/wild_visual_navigation_anymal/package.xml b/wild_visual_navigation_anymal/package.xml
index 5714f941..ad4523b2 100644
--- a/wild_visual_navigation_anymal/package.xml
+++ b/wild_visual_navigation_anymal/package.xml
@@ -14,9 +14,9 @@
[the XML dependency tags were stripped during extraction; only the package names remain]
  rospy
  roscpp
- anymal_msgs
  nav_msgs
  sensor_msgs
  std_msgs
  wild_visual_navigation_msgs
+ wild_visual_navigation_ros
diff --git a/wild_visual_navigation_anymal/scripts/anymal_msg_converter_node.py b/wild_visual_navigation_anymal/scripts/anymal_msg_converter_node.py
old mode 100755
new mode 100644
index ad993880..8f101291
--- a/wild_visual_navigation_anymal/scripts/anymal_msg_converter_node.py
+++ b/wild_visual_navigation_anymal/scripts/anymal_msg_converter_node.py
@@
-1,4 +1,9 @@ #!/usr/bin/python3 +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from wild_visual_navigation_msgs.msg import CustomState, RobotState from anymal_msgs.msg import AnymalState import rospy @@ -132,8 +137,8 @@ def anymal_msg_callback(anymal_state, return_msg=False): output_topic = rospy.get_param("~output_topic", "/wild_visual_navigation_node/robot_state") # Set publishers and subscribers - anymal_state_sub = rospy.Subscriber(anymal_state_topic, AnymalState, anymal_msg_callback, queue_size=20) robot_state_pub = rospy.Publisher(output_topic, RobotState, queue_size=20) + anymal_state_sub = rospy.Subscriber(anymal_state_topic, AnymalState, anymal_msg_callback, queue_size=20) rospy.loginfo("[anymal_msg_converter_node] ready") rospy.spin() diff --git a/wild_visual_navigation_anymal/scripts/anymal_msg_with_latent_converter_node.py b/wild_visual_navigation_anymal/scripts/anymal_msg_with_latent_converter_node.py old mode 100755 new mode 100644 index 3d40005c..2b47ce57 --- a/wild_visual_navigation_anymal/scripts/anymal_msg_with_latent_converter_node.py +++ b/wild_visual_navigation_anymal/scripts/anymal_msg_with_latent_converter_node.py @@ -1,4 +1,9 @@ #!/usr/bin/python3 +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from wild_visual_navigation_msgs.msg import CustomState, RobotState from anymal_msgs.msg import AnymalState from std_msgs.msg import Float32MultiArray diff --git a/wild_visual_navigation_anymal/scripts/open_source_rosbag_converter/convert_to_public_format.py b/wild_visual_navigation_anymal/scripts/open_source_rosbag_converter/convert_to_public_format.py new file mode 100644 index 00000000..586f0402 --- /dev/null +++ b/wild_visual_navigation_anymal/scripts/open_source_rosbag_converter/convert_to_public_format.py @@ -0,0 +1,133 @@ +#!/usr/bin/env python +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# + +import rospy +from anymal_msgs.msg import AnymalState +from sensor_msgs.msg import JointState +import tf2_ros +import geometry_msgs.msg + +# for wvn +from wild_visual_navigation_msgs.msg import CustomState, RobotState + +labels_initialized = False +# Preallocate messages +robot_state_msg = RobotState() +# Extract joint states +joint_position = CustomState() +joint_position.name = "joint_position" +joint_position.dim = 12 +joint_position.labels = [""] * joint_position.dim +joint_position.values = [0] * joint_position.dim +robot_state_msg.states.append(joint_position) + +# Joint velocity +joint_velocity = CustomState() +joint_velocity.name = "joint_velocity" +joint_velocity.dim = 12 +joint_velocity.labels = [""] * joint_velocity.dim +joint_velocity.values = [0] * joint_velocity.dim +robot_state_msg.states.append(joint_velocity) + +# Acceleration +joint_acceleration = CustomState() +joint_acceleration.name = "joint_acceleration" +joint_acceleration.dim = 12 +joint_acceleration.labels = [""] * joint_acceleration.dim +joint_acceleration.values = [0] * joint_acceleration.dim +robot_state_msg.states.append(joint_acceleration) + +# Effort +joint_effort = CustomState() +joint_effort.name = "joint_effort" +joint_effort.dim = 12 +joint_effort.labels = [""] * joint_effort.dim +joint_effort.values = [0] * joint_effort.dim +robot_state_msg.states.append(joint_effort) + +# Vector state +vector_state = CustomState() +vector_state.name = "vector_state" +vector_state.dim = 7 + 6 # + 4 * 12 +vector_state.values = [0] * vector_state.dim +vector_state.labels = [""] * vector_state.dim +vector_state.values = [0] * vector_state.dim +robot_state_msg.states.append(vector_state) + + +i = 0 +for x in ["tx", "ty", "tz", "qx", "qy", "qz", "qw", "vx", "vy", "vz", "wx", "wy", "wz"]: + robot_state_msg.states[4].labels[i] = x + i += 1 + + +def anymal_state_callback(anymal_state_msg): + global labels_initialized + + # Joint states for URDF + joint_state_msg = JointState() + joint_state_msg.header = anymal_state_msg.joints.header + joint_state_msg.header.frame_id = "" + joint_state_msg.name = anymal_state_msg.joints.name + joint_state_msg.position = anymal_state_msg.joints.position + # joint_state_msg.velocity = anymal_state_msg.joints.velocity + # joint_state_msg.effort = anymal_state_msg.joints.effort + joint_state_publisher.publish(joint_state_msg) + # Pose for odometry + pose = anymal_state_msg.pose.pose + + # Create a TransformStamped message + transform_stamped = geometry_msgs.msg.TransformStamped() + transform_stamped.header = anymal_state_msg.pose.header + transform_stamped.child_frame_id = "base" + transform_stamped.transform.translation = pose.position + transform_stamped.transform.rotation = pose.orientation + + # Publish the TF transform + tf_broadcaster.sendTransform(transform_stamped) + + # WVN + + # For RobotState msg + robot_state_msg.header = anymal_state_msg.header + + # Extract pose + robot_state_msg.pose = anymal_state_msg.pose + + # Extract twist + robot_state_msg.twist = anymal_state_msg.twist + # Vector state + robot_state_msg.states[4].values[0] = anymal_state_msg.pose.pose.position.x + robot_state_msg.states[4].values[1] = anymal_state_msg.pose.pose.position.y + robot_state_msg.states[4].values[2] = anymal_state_msg.pose.pose.position.z + robot_state_msg.states[4].values[3] = anymal_state_msg.pose.pose.orientation.x + robot_state_msg.states[4].values[4] = anymal_state_msg.pose.pose.orientation.y + robot_state_msg.states[4].values[5] = anymal_state_msg.pose.pose.orientation.z + 
robot_state_msg.states[4].values[6] = anymal_state_msg.pose.pose.orientation.w + robot_state_msg.states[4].values[7] = anymal_state_msg.twist.twist.linear.x + robot_state_msg.states[4].values[8] = anymal_state_msg.twist.twist.linear.y + robot_state_msg.states[4].values[9] = anymal_state_msg.twist.twist.linear.z + robot_state_msg.states[4].values[10] = anymal_state_msg.twist.twist.angular.x + robot_state_msg.states[4].values[11] = anymal_state_msg.twist.twist.angular.y + robot_state_msg.states[4].values[12] = anymal_state_msg.twist.twist.angular.z + robot_state_pub.publish(robot_state_msg) + + +if __name__ == "__main__": + rospy.init_node("anymal_state_republisher") + tf_broadcaster = tf2_ros.TransformBroadcaster() + + # Read parameters + joint_states_topic = rospy.get_param("~joint_states") + anymal_state_topic = rospy.get_param( + "~anymal_state_topic", + ) + output_topic = rospy.get_param("~output_topic") + joint_state_publisher = rospy.Publisher(joint_states_topic, JointState, queue_size=10) + robot_state_pub = rospy.Publisher(output_topic, RobotState, queue_size=10) + rospy.Subscriber(anymal_state_topic, AnymalState, anymal_state_callback, queue_size=1) + rospy.spin() diff --git a/wild_visual_navigation_anymal/scripts/open_source_rosbag_converter/encoding_fixer.py b/wild_visual_navigation_anymal/scripts/open_source_rosbag_converter/encoding_fixer.py new file mode 100644 index 00000000..48d2f2a1 --- /dev/null +++ b/wild_visual_navigation_anymal/scripts/open_source_rosbag_converter/encoding_fixer.py @@ -0,0 +1,39 @@ +#!/env/bin/python3 +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# + +from __future__ import print_function + +import sys +import rospy +from sensor_msgs.msg import Image +from cv_bridge import CvBridge + + +class ImageConverter: + def __init__(self, cam): + self.image_pub = rospy.Publisher(f"/alphasense_driver_ros/{cam}_corrected", Image, queue_size=10) + + self.bridge = CvBridge() + self.image_sub = rospy.Subscriber(f"/alphasense_driver_ros/{cam}", Image, self.callback) + + def callback(self, msg): + msg.encoding = "bayer_gbrg8" + self.image_pub.publish(msg) + + +def main(args): + rospy.init_node("image_converter", anonymous=True) + cam = rospy.get_param("~cam", "cam4") + ic = ImageConverter(cam) # noqa: F841 + try: + rospy.spin() + except KeyboardInterrupt: + print("Shutting down") + + +if __name__ == "__main__": + main(sys.argv) diff --git a/wild_visual_navigation_anymal/scripts/open_source_rosbag_converter/store_images_readme_preview.py b/wild_visual_navigation_anymal/scripts/open_source_rosbag_converter/store_images_readme_preview.py new file mode 100644 index 00000000..bf74818c --- /dev/null +++ b/wild_visual_navigation_anymal/scripts/open_source_rosbag_converter/store_images_readme_preview.py @@ -0,0 +1,55 @@ +#!/usr/bin/env python + +import rospy +import cv2 +import os +from sensor_msgs.msg import Image +from cv_bridge import CvBridge +from message_filters import ApproximateTimeSynchronizer, Subscriber +from PIL import Image as PILImage + + +class ImageSaver: + def __init__(self): + rospy.init_node("image_saver", anonymous=True) + self.bridge = CvBridge() + self.count = 0 + + self.dir_path = "/Data/open_source_dataset" + # Create directories to store images + # Subscriber for the images with approximate time synchronization + self.trav_sub = Subscriber("/wild_visual_navigation_visu_traversability_rear/traversability_overlayed", 
Image) + self.raw_sub = Subscriber("/wide_angle_camera_rear/image_color_rect_resize", Image) + self.sync = ApproximateTimeSynchronizer([self.trav_sub, self.raw_sub], queue_size=1, slop=0.3) + self.sync.registerCallback(self.callback) + + def callback(self, trav_msg, raw_msg): + try: + trav_cv2 = self.bridge.imgmsg_to_cv2(trav_msg, desired_encoding="passthrough") + raw_cv2 = self.bridge.imgmsg_to_cv2(raw_msg, desired_encoding="passthrough") + + # Convert images to RGBA format + trav_rgba = cv2.cvtColor(trav_cv2, cv2.COLOR_RGB2BGR) + raw_rgba = cv2.cvtColor(raw_cv2, cv2.COLOR_BGR2BGRA) + + # Save traversability image + trav_filename = os.path.join(self.dir_path, f"{self.count}_trav.png") + cv2.imwrite(trav_filename, trav_rgba) + rospy.loginfo(f"Saved traversability image: {trav_filename}") + + # Save raw image + raw_filename = os.path.join(self.dir_path, f"{self.count}_raw.png") + cv2.imwrite(raw_filename, raw_rgba) + rospy.loginfo(f"Saved raw image: {raw_filename}") + + self.count += 1 + except Exception as e: + rospy.logerr(f"Error processing images: {e}") + + +if __name__ == "__main__": + try: + ImageSaver() + rospy.spin() + except rospy.ROSInterruptException: + pass diff --git a/wild_visual_navigation_anymal/scripts/policy_debug_info_converter_node.py b/wild_visual_navigation_anymal/scripts/policy_debug_info_converter_node.py index 988fbb8c..82773fb5 100644 --- a/wild_visual_navigation_anymal/scripts/policy_debug_info_converter_node.py +++ b/wild_visual_navigation_anymal/scripts/policy_debug_info_converter_node.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# import rospy from geometry_msgs.msg import TwistStamped from std_msgs.msg import Float32MultiArray @@ -16,7 +21,12 @@ def policy_debug_info_callback(debug_info_msg: Float32MultiArray): if __name__ == "__main__": rospy.init_node("policy_debug_info_converter_node") # Set publishers and subscribers - sub = rospy.Subscriber("/debug_info", Float32MultiArray, queue_size=10, callback=policy_debug_info_callback) + sub = rospy.Subscriber( + "/debug_info", + Float32MultiArray, + queue_size=10, + callback=policy_debug_info_callback, + ) pub = rospy.Publisher("/log/state/desiredRobotTwist", TwistStamped, queue_size=10) rospy.loginfo("[policy_debug_info_converter_node] ready") rospy.spin() diff --git a/wild_visual_navigation_anymal/src/anymal_msg_converter_cpp_node.cpp b/wild_visual_navigation_anymal/src/anymal_msg_converter_cpp_node.cpp index 2b536a1c..b4e82a3c 100644 --- a/wild_visual_navigation_anymal/src/anymal_msg_converter_cpp_node.cpp +++ b/wild_visual_navigation_anymal/src/anymal_msg_converter_cpp_node.cpp @@ -43,7 +43,7 @@ class AnymalMsgConverter { }; int main(int argc, char** argv) { - ros::init(argc, argv, "field_local_planner"); + ros::init(argc, argv, "anymal_msg_converter"); ros::NodeHandle nh("~"); AnymalMsgConverter converter(nh); return 0; diff --git a/wild_visual_navigation_msgs/CMakeLists.txt b/wild_visual_navigation_msgs/CMakeLists.txt index 20aaf45a..f8f47572 100644 --- a/wild_visual_navigation_msgs/CMakeLists.txt +++ b/wild_visual_navigation_msgs/CMakeLists.txt @@ -6,6 +6,7 @@ find_package(catkin REQUIRED COMPONENTS message_generation message_runtime std_msgs + sensor_msgs ) add_message_files( @@ -13,6 +14,7 @@ add_message_files( CustomState.msg RobotState.msg SystemState.msg + ImageFeatures.msg ) add_service_files( @@ -25,6 +27,7 @@ generate_messages( DEPENDENCIES 
geometry_msgs std_msgs + sensor_msgs ) catkin_package( @@ -32,4 +35,5 @@ catkin_package( message_generation message_runtime std_msgs + sensor_msgs ) diff --git a/wild_visual_navigation_msgs/msg/ImageFeatures.msg b/wild_visual_navigation_msgs/msg/ImageFeatures.msg new file mode 100644 index 00000000..8360f6a6 --- /dev/null +++ b/wild_visual_navigation_msgs/msg/ImageFeatures.msg @@ -0,0 +1,5 @@ +# Header +std_msgs/Header header +sensor_msgs/Image feature_segments +std_msgs/Float32MultiArray features + diff --git a/wild_visual_navigation_msgs/msg/SystemState.msg b/wild_visual_navigation_msgs/msg/SystemState.msg index 2b1e314d..03e12f9c 100644 --- a/wild_visual_navigation_msgs/msg/SystemState.msg +++ b/wild_visual_navigation_msgs/msg/SystemState.msg @@ -1,5 +1,7 @@ # Pipeline operation mode uint32 mode +# Current valid samples +uint32 mission_graph_num_valid_node # Training loss float32 loss_total # Training loss @@ -10,5 +12,3 @@ float32 loss_reco uint32 step # Pause learning flag bool pause_learning -# Traversability thereshold scaling -float32 scale_traversability_threshold \ No newline at end of file diff --git a/wild_visual_navigation_msgs/package.xml b/wild_visual_navigation_msgs/package.xml index c1352f70..41fe2074 100644 --- a/wild_visual_navigation_msgs/package.xml +++ b/wild_visual_navigation_msgs/package.xml @@ -16,4 +16,5 @@ message_generation message_runtime std_msgs + sensor_msgs diff --git a/wild_visual_navigation_ros/CMakeLists.txt b/wild_visual_navigation_ros/CMakeLists.txt index 830d02f5..8953d5e1 100644 --- a/wild_visual_navigation_ros/CMakeLists.txt +++ b/wild_visual_navigation_ros/CMakeLists.txt @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# cmake_minimum_required(VERSION 3.0.2) project(wild_visual_navigation_ros) @@ -14,9 +19,9 @@ catkin_package( ) catkin_python_setup() -catkin_install_python(PROGRAMS scripts/wild_visual_navigation_node.py +catkin_install_python(PROGRAMS scripts/wvn_feature_extractor_node.py + scripts/wvn_learning_node.py scripts/overlay_images.py - scripts/shift_gridmap.py scripts/smart_carrot.py scripts/rosbag_play.sh DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}) diff --git a/wild_visual_navigation_ros/README.md b/wild_visual_navigation_ros/README.md new file mode 100644 index 00000000..4eda8845 --- /dev/null +++ b/wild_visual_navigation_ros/README.md @@ -0,0 +1,13 @@ +Commands for deployment: + +# Start/stop recording +$HOME/workspaces/catkin_ws/src/wild_visual_navigation/wild_visual_navigation_ros/config/recording/start_recording.sh +$HOME/workspaces/catkin_ws/src/wild_visual_navigation/wild_visual_navigation_ros/config/recording/stop_recording.sh + +# Copying the data +$HOME/workspaces/catkin_ws/src/anymal_rsl/anymal_rsl/anymal_rsl_utils/anymal_rsl_recording/anymal_rsl_recording/bin/copy_mission_data_from_robot.sh jonfrey dodo 2024-02-06-15-20-33 /Data/2024_02_06_Dodo_MPI_WVN + +ssh jonfrey@anymal-dodo-jetson -t 'chown -R jonfrey ~/git/anymal_rsl/anymal_rsl/anymal_rsl_utils/anymal_rsl_recording/anymal_rsl_recording/data/' +ssh jonfrey@anymal-dodo-jetson -t 'chgrp -R jonfrey ~/git/anymal_rsl/anymal_rsl/anymal_rsl_utils/anymal_rsl_recording/anymal_rsl_recording/data/' + +$HOME/workspaces/catkin_ws/src/anymal_rsl/anymal_rsl/anymal_rsl_utils/anymal_rsl_recording/anymal_rsl_recording/bin/remove_mission_data_from_robot.sh jonfrey dodo \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/elevation_mapping_cupy/anymal_sensor_parameter.yaml b/wild_visual_navigation_ros/config/elevation_mapping_cupy/anymal_sensor_parameter.yaml deleted file mode 100644 index 5810011a..00000000 --- a/wild_visual_navigation_ros/config/elevation_mapping_cupy/anymal_sensor_parameter.yaml +++ /dev/null @@ -1,141 +0,0 @@ -#### Subscribers ######## -subscribers: - # RGB cameras - # alphasense_front_rgb: - # fusion: ['image_color'] - # topic_name_camera: /alphasense_driver_ros/cam4/debayered - # topic_name_camera_info: /alphasense_driver_ros/cam4/camera_info - # channels: ["rgb_image"] - # data_type: image - - # alphasense_left_rgb: - # fusion: ['image_color'] - # topic_name_camera: /alphasense_driver_ros/cam3/debayered - # topic_name_camera_info: /alphasense_driver_ros/cam3/camera_info - # channels: ["rgb_image"] - # data_type: image - - # alphasense_right_rgb: - # fusion: ['image_color'] - # topic_name_camera: /alphasense_driver_ros/cam5/debayered - # topic_name_camera_info: /alphasense_driver_ros/cam5/camera_info - # channels: ["rgb_image"] - # data_type: image - - # wide_angle_camera_rear: - # fusion: ['image_color'] - # topic_name_camera: /wide_angle_camera_rear/image_color_rect/compressed - # topic_name_camera_info: /wide_angle_camera_rear/camera_info - # channels: ["rgb_image"] - # data_type: image - - # wide_angle_camera_front: - # fusion: ['image_color'] - # topic_name_camera: /wide_angle_camera_front/image_color_rect/compressed - # topic_name_camera_info: /wide_angle_camera_front/camera_info - # channels: ["rgb_image"] - # data_type: image - - # Traversability - wvn_front_traversability: - fusion: ['image_exponential'] - topic_name_camera: '/wild_visual_navigation_node/front/traversability' - topic_name_camera_info: '/wild_visual_navigation_node/front/camera_info' - channels: ["visual_traversability"] - 
data_type: image - - # wvn_left_traversability: - # fusion: ['image_exponential'] - # topic_name_camera: '/wild_visual_navigation_node/left/traversability' - # topic_name_camera_info: '/wild_visual_navigation_node/left/camera_info' - # channels: ["visual_traversability"] - # data_type: image - - # wvn_right_traversability: - # fusion: ['image_exponential'] - # topic_name_camera: '/wild_visual_navigation_node/right/traversability' - # topic_name_camera_info: '/wild_visual_navigation_node/right/camera_info' - # channels: ["visual_traversability"] - # data_type: image - - # Confidence - # wvn_front_confidence: - # fusion: ['image_exponential'] - # topic_name_camera: '/wild_visual_navigation_node/front/confidence' - # topic_name_camera_info: '/wild_visual_navigation_node/front/camera_info' - # channels: ["visual_confidence"] - # data_type: image - - # wvn_left_confidence: - # fusion: ['image_exponential'] - # topic_name_camera: '/wild_visual_navigation_node/left/confidence' - # topic_name_camera_info: '/wild_visual_navigation_node/left/camera_info' - # channels: ["visual_confidence"] - # data_type: image - - # wvn_right_confidence: - # fusion: ['image_exponential'] - # topic_name_camera: '/wild_visual_navigation_node/right/confidence_raw' - # topic_name_camera_info: '/wild_visual_navigation_node/right/camera_info' - # channels: ["visual_confidence"] - # data_type: image - - - front_depth: - channels: [] - fusion: [] - topic_name: /depth_camera_front/point_cloud_self_filtered - data_type: pointcloud - - rear_depth: - channels: [] - fusion: [] - topic_name: /depth_camera_rear/point_cloud_self_filtered - data_type: pointcloud - - left_depth: - channels: [] - fusion: [] - topic_name: /depth_camera_left/point_cloud_self_filtered - data_type: pointcloud - - right_depth: - channels: [] - fusion: [] - topic_name: /depth_camera_right/point_cloud_self_filtered - data_type: pointcloud - - - # velodyne: - # channels: [] - # fusion: [] - # topic_name: /point_cloud_filter/lidar/point_cloud_filtered - # data_type: pointcloud - - - # Cerberus - front_bpearl: - channels: [] - fusion: [] - topic_name: /robot_self_filter/bpearl_front/point_cloud - data_type: pointcloud - - rear_bpearl: - channels: [] - fusion: [] - topic_name: /robot_self_filter/bpearl_rear/point_cloud - data_type: pointcloud - - - # Cameras for D - # front_depth_d: - # channels: [] - # fusion: [] - # topic_name: /depth_camera_front_upper/point_cloud_self_filtered - # data_type: pointcloud - - # rear_depth_d: - # channels: [] - # fusion: [] - # topic_name: /depth_camera_rear_upper/point_cloud_self_filtered - # data_type: pointcloud \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/elevation_mapping_cupy/wvn_parameters.yaml b/wild_visual_navigation_ros/config/elevation_mapping_cupy/wvn_parameters.yaml new file mode 100644 index 00000000..a9440421 --- /dev/null +++ b/wild_visual_navigation_ros/config/elevation_mapping_cupy/wvn_parameters.yaml @@ -0,0 +1,102 @@ +#### Basic parameters ######## +resolution: 0.04 # resolution in m. +map_length: 8 # map's size in m. +sensor_noise_factor: 0.05 # point's noise is sensor_noise_factor*z^2 (z is distance from sensor). +mahalanobis_thresh: 2.0 # points outside this distance is outlier. +outlier_variance: 0.01 # if point is outlier, add this value to the cell. +drift_compensation_variance_inler: 0.05 # cells under this value is used for drift compensation. 
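# (Illustrative worked example, not part of the original file: with sensor_noise_factor: 0.05,
#  a point measured z = 3 m from the sensor gets a modeled noise of 0.05 * 3^2 = 0.45.)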
+max_drift: 0.1 # drift compensation happens only if the drift is smaller than this value (for safety)
+drift_compensation_alpha: 0.1 # drift compensation alpha for smoother update of drift compensation
+time_variance: 0.0001 # add this value when update_variance is called.
+max_variance: 100.0 # maximum variance for each cell.
+initial_variance: 1000.0 # initial variance for each cell.
+traversability_inlier: 0.9 # cells with higher traversability are used for drift compensation.
+dilation_size: 3 # dilation filter size before traversability filter.
+wall_num_thresh: 20 # if there are more points than this value, only higher points than the current height are used to make the wall more sharp.
+min_height_drift_cnt: 100 # drift compensation only happens if the valid cells are more than this number.
+position_noise_thresh: 0.01 # if the position change is bigger than this value, the drift compensation happens.
+orientation_noise_thresh: 0.01 # if the orientation change is bigger than this value, the drift compensation happens.
+position_lowpass_alpha: 0.2 # lowpass filter alpha used for detecting movements.
+orientation_lowpass_alpha: 0.2 # lowpass filter alpha used for detecting movements.
+min_valid_distance: 0.5 # points with shorter distance will be filtered out.
+max_height_range: 1.0 # points higher than this value from sensor will be filtered out to disable ceiling.
+ramped_height_range_a: 0.3 # if z > max(d - ramped_height_range_b, 0) * ramped_height_range_a + ramped_height_range_c, reject.
+ramped_height_range_b: 1.0 # if z > max(d - ramped_height_range_b, 0) * ramped_height_range_a + ramped_height_range_c, reject.
+ramped_height_range_c: 0.2 # if z > max(d - ramped_height_range_b, 0) * ramped_height_range_a + ramped_height_range_c, reject.
+update_variance_fps: 5.0 # fps for updating variance.
+update_pose_fps: 10.0 # fps for updating the pose and shifting the center of the map.
+time_interval: 0.1 # Time layer is updated with this interval.
+map_acquire_fps: 5.0 # Raw map is fetched from GPU memory at this fps.
+publish_statistics_fps: 1.0 # Publish statistics topic at this fps.
+
+max_ray_length: 10.0 # maximum length for ray tracing.
+cleanup_step: 0.1 # subtract this value from the validity layer at visibility cleanup.
+cleanup_cos_thresh: 0.1 # subtract this value from the validity layer at visibility cleanup.
+
+safe_thresh: 0.7 # if traversability is smaller, it is counted as an unsafe cell.
+safe_min_thresh: 0.4 # polygon is unsafe if there exists lower traversability than this.
+max_unsafe_n: 10 # if the number of cells under safe_thresh exceeds this value, polygon is unsafe.
+
+overlap_clear_range_xy: 4.0 # xy range [m] for clearing overlapped area. this defines the valid area for overlap clearance. (used for multi floor setting)
+overlap_clear_range_z: 2.0 # z range [m] for clearing overlapped area. cells outside this range will be cleared. (used for multi floor setting)
+
+map_frame: 'odom' # The map frame that the odometry source uses.
+base_frame: 'base' # The robot's base frame. This frame will be the center of the map.
+corrected_map_frame: 'odom_corrected'
+
+#### Feature toggles ########
+enable_edge_sharpen: true
+enable_visibility_cleanup: true
+enable_drift_compensation: true
+enable_overlap_clearance: true
+enable_pointcloud_publishing: false
+enable_drift_corrected_TF_publishing: false
+enable_normal_color: false # If true, the map contains a 'color' layer corresponding to the normal. Add the 'color' layer to the publishers setting if you want to visualize it.
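# (Illustrative worked example, not part of the original file: with ramped_height_range_a: 0.3,
#  ramped_height_range_b: 1.0 and ramped_height_range_c: 0.2 as above, a point at distance d = 3.0 m
#  is rejected if its height z exceeds max(3.0 - 1.0, 0) * 0.3 + 0.2 = 0.8 m above the sensor;
#  for d <= 1.0 m the threshold reduces to ramped_height_range_c = 0.2 m.)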
+
+#### Traversability filter ########
+use_chainer: false # Use chainer as a backend of the traversability filter or pytorch. If false, it uses pytorch. pytorch requires ~2GB more GPU memory compared to chainer but runs faster.
+weight_file: '$(rospack find elevation_mapping_cupy)/config/core/weights.dat' # Weight file for traversability filter
+
+#### Upper bound ########
+use_only_above_for_upper_bound: false
+
+#### Initializer ########
+initialize_method: 'linear' # Choose one from 'nearest', 'linear', 'cubic'
+initialize_frame_id: ['RF_FOOT', 'LF_FOOT', 'RH_FOOT', 'LH_FOOT'] # One tf (like ['footprint'] ) initializes a square around it.
+initialize_tf_offset: [0.0, 0.0, 0.0, 0.0] # z direction. Should have the same number of entries as initialize_frame_id.
+dilation_size_initialize: 5 # dilation size after the init.
+initialize_tf_grid_size: 0.4 # This is not used if the number of tf is more than 3.
+use_initializer_at_start: true # Use initializer when the node starts.
+
+#### Plugins ########
+plugin_config_file: '$(rospack find wild_visual_navigation_ros)/config/elevation_mapping_cupy/anymal_plugin_config.yaml'
+
+pointcloud_channel_fusions:
+  rgb: 'color'
+  default: 'average'
+
+image_channel_fusions:
+  rgb: 'color'
+  default: 'exponential'
+  feat_.*: 'exponential'
+  sem_.*: 'exponential'
+
+
+#### Publishers ########
+# topic_name:
+#   layers: # Choose from 'elevation', 'variance', 'traversability', 'time', 'normal_x', 'normal_y', 'normal_z', 'color', plugin_layer_names
+#   basic_layers: # basic_layers for valid cell computation (e.g. Rviz): Choose a subset of `layers`.
+#   fps: # Publish rate. Use a smaller value than `map_acquire_fps`.
+
+publishers:
+  elevation_map_raw:
+    layers: ['elevation', 'traversability', 'variance', 'rgb']
+    basic_layers: ['elevation', 'traversability']
+    fps: 5.0
+
+  semantic_map_raw:
+    layers: ['elevation', 'traversability', 'visual_traversability', 'visual_confidence', 'elevation_with_semantics']
+    basic_layers: ['elevation', 'traversability']
+    fps: 5.0
+
+
\ No newline at end of file
diff --git a/wild_visual_navigation_ros/config/elevation_mapping_cupy/wvn_plugin_config.yaml b/wild_visual_navigation_ros/config/elevation_mapping_cupy/wvn_plugin_config.yaml
new file mode 100644
index 00000000..9e41a17c
--- /dev/null
+++ b/wild_visual_navigation_ros/config/elevation_mapping_cupy/wvn_plugin_config.yaml
@@ -0,0 +1,38 @@
+# Settings of the plugins. (The plugins should be stored in script/plugins)
+
+# min_filter fills in the minimum value around the invalid cell.
+min_filter:
+  enable: True # whether to load this plugin
+  fill_nan: False # Fill nans into invalid cells of the elevation layer.
+  is_height_layer: True # If this is a height layer (such as elevation) or not (such as traversability)
+  layer_name: "min_filter" # The layer name.
+  extra_params: # These params are passed to the plugin class on initialization.
+    dilation_size: 1 # The patch size to apply
+    iteration_n: 30 # The number of iterations
+
+# Apply smoothing.
+smooth_filter: + enable: True + fill_nan: False + is_height_layer: True + layer_name: "smooth" + extra_params: + input_layer_name: "min_filter" + +# Apply inpainting using opencv +inpainting: + enable: True + fill_nan: False + is_height_layer: True + layer_name: "inpaint" + extra_params: + method: "telea" # telea or ns + +# Apply inpainting using opencv +mask_elevation_semantics: + enable: True + fill_nan: False + is_height_layer: True + layer_name: "elevation_with_semantics" + extra_params: + semantic_reference_layer_name: "visual_traversability" \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/elevation_mapping_cupy/wvn_sensor_parameter.yaml b/wild_visual_navigation_ros/config/elevation_mapping_cupy/wvn_sensor_parameter.yaml new file mode 100644 index 00000000..3b034e76 --- /dev/null +++ b/wild_visual_navigation_ros/config/elevation_mapping_cupy/wvn_sensor_parameter.yaml @@ -0,0 +1,67 @@ +#### Subscribers ######## +subscribers: + front_upper_depth: + topic_name: /depth_camera_front_upper/point_cloud_self_filtered + data_type: pointcloud + + front_lower_depth: + topic_name: /depth_camera_front_lower/point_cloud_self_filtered + data_type: pointcloud + + rear_upper_depth: + topic_name: /depth_camera_rear_upper/point_cloud_self_filtered + data_type: pointcloud + + rear_lower_depth: + topic_name: /depth_camera_rear_lower/point_cloud_self_filtered + data_type: pointcloud + + left_depth: + topic_name: /depth_camera_left/point_cloud_self_filtered + data_type: pointcloud + + right_depth: + topic_name: /depth_camera_right/point_cloud_self_filtered + data_type: pointcloud + + velodyne: + topic_name: /point_cloud_filter/lidar/point_cloud_filtered + data_type: pointcloud + + # Semantics + front_wide_angle: + topic_name: /wide_angle_camera_front/image_color_rect/compressed + camera_info_topic_name: /wide_angle_camera_front/camera_info + data_type: image + + rear_wide_angle: + topic_name: /wide_angle_camera_rear/image_color_rect/compressed + camera_info_topic_name: /wide_angle_camera_rear/camera_info + data_type: image + + + # Traversability + # channels: ["visual_traversability"] + sem_wvn_front_traversability: + topic_name: '/wild_visual_navigation_node/front/traversability' + camera_info_topic_name: '/wild_visual_navigation_node/front/camera_info' + channels: ['visual_traversability'] + data_type: image + + sem_wvn_left_traversability: + topic_name: '/wild_visual_navigation_node/left/traversability' + camera_info_topic_name: '/wild_visual_navigation_node/left/camera_info' + channels: ['visual_traversability'] + data_type: image + + sem_wvn_right_traversability: + topic_name: '/wild_visual_navigation_node/right/traversability' + camera_info_topic_name: '/wild_visual_navigation_node/right/camera_info' + channels: ['visual_traversability'] + data_type: image + + # sem_wvn_front_confidence: + # topic_name: '/wild_visual_navigation_node/front/confidence' + # camera_info_topic_name: '/wild_visual_navigation_node/front/camera_info' + # channels: ['visual_traversability'] + # data_type: image \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/field_local_planner/visual/filter_chain_wifi.yaml b/wild_visual_navigation_ros/config/field_local_planner/visual/filter_chain_wifi.yaml deleted file mode 100644 index f36d72c3..00000000 --- a/wild_visual_navigation_ros/config/field_local_planner/visual/filter_chain_wifi.yaml +++ /dev/null @@ -1,6 +0,0 @@ -grid_map_filters: - - - name: delete - type: gridMapFiltersDrs/DeleteAllButFilter - params: - layers: [elevation, 
elevation_inpainted, geometric_traversability, visual_traversability_inpainted, geodesic, sdf] # List of layers. \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/rviz/open_source.rviz b/wild_visual_navigation_ros/config/rviz/open_source.rviz new file mode 100644 index 00000000..baafb702 --- /dev/null +++ b/wild_visual_navigation_ros/config/rviz/open_source.rviz @@ -0,0 +1,1030 @@ +Panels: + - Class: rviz/Displays + Help Height: 0 + Name: Displays + Property Tree Widget: + Expanded: + - /Global Options1 + - /TF1/Tree1 + - /Robot Info1 + - /Wild Visual Navigation1 + - /Wild Visual Navigation1/Self Supervision1 + - /Wild Visual Navigation1/Cameras Resized1 + - /Wild Visual Navigation1/Depth Sensors1 + - /Wild Visual Navigation1/Prediction1 + Splitter Ratio: 0.6411483287811279 + Tree Height: 863 + - Class: rviz/Selection + Name: Selection + - Class: rviz/Tool Properties + Expanded: + - /2D Pose Estimate1 + - /2D Nav Goal1 + - /Publish Point1 + Name: Tool Properties + Splitter Ratio: 0.5886790156364441 + - Class: rviz/Views + Expanded: + - /Current View1 + Name: Views + Splitter Ratio: 0.5 + - Class: rviz/Time + Name: Time + SyncMode: 0 + SyncSource: AB Wide Angle Front +Preferences: + PromptSaveOnExit: true +Toolbars: + toolButtonStyle: 2 +Visualization Manager: + Class: "" + Displays: + - Alpha: 0.5 + Cell Size: 1 + Class: rviz/Grid + Color: 160; 160; 164 + Enabled: false + Line Style: + Line Width: 0.029999999329447746 + Value: Lines + Name: Grid + Normal Cell Count: 0 + Offset: + X: 0 + Y: 0 + Z: 0 + Plane: XY + Plane Cell Count: 20 + Reference Frame: + Value: false + - Class: rviz/TF + Enabled: true + Frame Timeout: 1000 + Frames: + All Enabled: false + LF_FOOT: + Value: true + LF_HAA_drive: + Value: true + LF_HFE_drive: + Value: true + LF_HFE_output: + Value: true + LF_HIP: + Value: true + LF_KFE_drive: + Value: true + LF_SHANK: + Value: true + LF_THIGH: + Value: true + LF_hip_fixed: + Value: true + LF_shank_fixed: + Value: true + LF_thigh_fixed: + Value: true + LH_FOOT: + Value: true + LH_HAA_drive: + Value: true + LH_HFE_drive: + Value: true + LH_HFE_output: + Value: true + LH_HIP: + Value: true + LH_KFE_drive: + Value: true + LH_SHANK: + Value: true + LH_THIGH: + Value: true + LH_hip_fixed: + Value: true + LH_shank_fixed: + Value: true + LH_thigh_fixed: + Value: true + RF_FOOT: + Value: true + RF_HAA_drive: + Value: true + RF_HFE_drive: + Value: true + RF_HFE_output: + Value: true + RF_HIP: + Value: true + RF_KFE_drive: + Value: true + RF_SHANK: + Value: true + RF_THIGH: + Value: true + RF_hip_fixed: + Value: true + RF_shank_fixed: + Value: true + RF_thigh_fixed: + Value: true + RH_FOOT: + Value: true + RH_HAA_drive: + Value: true + RH_HFE_drive: + Value: true + RH_HFE_output: + Value: true + RH_HIP: + Value: true + RH_KFE_drive: + Value: true + RH_SHANK: + Value: true + RH_THIGH: + Value: true + RH_hip_fixed: + Value: true + RH_shank_fixed: + Value: true + RH_thigh_fixed: + Value: true + base: + Value: true + base_inertia: + Value: true + battery: + Value: true + body_top: + Value: true + bottom_shell: + Value: true + camera_init: + Value: true + camera_init_CORRECTED: + Value: true + depth_camera_front_lower_camera: + Value: true + depth_camera_front_lower_camera_parent: + Value: true + depth_camera_front_upper_camera: + Value: true + depth_camera_front_upper_camera_parent: + Value: true + depth_camera_left_camera: + Value: true + depth_camera_left_camera_parent: + Value: true + depth_camera_rear_lower_camera: + Value: true + 
depth_camera_rear_lower_camera_parent: + Value: true + depth_camera_rear_upper_camera: + Value: true + depth_camera_rear_upper_camera_parent: + Value: true + depth_camera_right_camera: + Value: true + depth_camera_right_camera_parent: + Value: true + docking_socket: + Value: true + face_front: + Value: true + face_rear: + Value: true + face_shell_front: + Value: true + face_shell_rear: + Value: true + feetcenter: + Value: true + footprint: + Value: true + hatch_shell: + Value: true + hbc_receiver: + Value: true + imu_link: + Value: true + lidar: + Value: true + lidar_parent: + Value: true + map: + Value: true + map_o3d_localization_manager: + Value: true + msf_body_imu_map: + Value: true + odom: + Value: true + top_shell: + Value: true + wide_angle_camera_front_camera: + Value: true + wide_angle_camera_front_camera_parent: + Value: true + wide_angle_camera_rear_camera: + Value: true + wide_angle_camera_rear_camera_parent: + Value: true + Marker Alpha: 1 + Marker Scale: 0.5 + Name: TF + Show Arrows: true + Show Axes: true + Show Names: true + Tree: + odom: + base: + LF_HAA_drive: + LF_HIP: + LF_hip_fixed: + LF_HFE_output: + LF_THIGH: + LF_HFE_drive: + LF_thigh_fixed: + LF_KFE_drive: + LF_SHANK: + LF_shank_fixed: + LF_FOOT: + {} + LH_HAA_drive: + LH_HIP: + LH_hip_fixed: + LH_HFE_output: + LH_THIGH: + LH_HFE_drive: + LH_thigh_fixed: + LH_KFE_drive: + LH_SHANK: + LH_shank_fixed: + LH_FOOT: + {} + RF_HAA_drive: + RF_HIP: + RF_hip_fixed: + RF_HFE_output: + RF_THIGH: + RF_HFE_drive: + RF_thigh_fixed: + RF_KFE_drive: + RF_SHANK: + RF_shank_fixed: + RF_FOOT: + {} + RH_HAA_drive: + RH_HIP: + RH_hip_fixed: + RH_HFE_output: + RH_THIGH: + RH_HFE_drive: + RH_thigh_fixed: + RH_KFE_drive: + RH_SHANK: + RH_shank_fixed: + RH_FOOT: + {} + base_inertia: + {} + battery: + {} + body_top: + {} + bottom_shell: + {} + depth_camera_left_camera: + depth_camera_left_camera_parent: + {} + depth_camera_right_camera: + depth_camera_right_camera_parent: + {} + docking_socket: + {} + face_front: + depth_camera_front_lower_camera: + depth_camera_front_lower_camera_parent: + {} + depth_camera_front_upper_camera: + depth_camera_front_upper_camera_parent: + {} + face_shell_front: + {} + wide_angle_camera_front_camera: + wide_angle_camera_front_camera_parent: + {} + face_rear: + depth_camera_rear_lower_camera: + depth_camera_rear_lower_camera_parent: + {} + depth_camera_rear_upper_camera: + depth_camera_rear_upper_camera_parent: + {} + face_shell_rear: + {} + wide_angle_camera_rear_camera: + wide_angle_camera_rear_camera_parent: + {} + hatch_shell: + {} + hbc_receiver: + {} + imu_link: + {} + lidar_parent: + lidar: + {} + top_shell: + {} + feetcenter: + {} + footprint: + {} + map_o3d_localization_manager: + {} + msf_body_imu_map: + {} + Update Interval: 0 + Value: true + - Class: rviz/Group + Displays: + - Angle Tolerance: 0.10000000149011612 + Class: rviz/Odometry + Covariance: + Orientation: + Alpha: 0.5 + Color: 255; 255; 127 + Color Style: Unique + Frame: Local + Offset: 1 + Scale: 1 + Value: true + Position: + Alpha: 0.30000001192092896 + Color: 204; 51; 204 + Scale: 1 + Value: true + Value: true + Enabled: false + Keep: 200 + Name: Odometry + Position Tolerance: 1 + Queue Size: 10 + Shape: + Alpha: 1 + Axes Length: 1 + Axes Radius: 0.10000000149011612 + Color: 255; 25; 0 + Head Length: 0.30000001192092896 + Head Radius: 0.10000000149011612 + Shaft Length: 1 + Shaft Radius: 0.05000000074505806 + Value: Arrow + Topic: /loam/odometry + Unreliable: false + Value: false + - Alpha: 1 + Class: rviz/RobotModel + Collision 
Enabled: false + Enabled: true + Links: + All Links Enabled: true + Expand Joint Details: false + Expand Link Details: false + Expand Tree: false + Link Tree Style: Links in Alphabetic Order + Name: RobotModel + Robot Description: anymal_description + TF Prefix: "" + Update Interval: 0 + Value: true + Visual Enabled: true + Enabled: true + Name: Robot Info + - Class: rviz/Group + Displays: + - Class: rviz/Group + Displays: + - Class: rviz/Marker + Enabled: true + Marker Topic: /wild_visual_navigation_node/graph_footprints + Name: Footprint + Namespaces: + footprints: true + Queue Size: 100 + Value: true + - Alpha: 1 + Buffer Length: 1 + Class: rviz/Path + Color: 67; 110; 176 + Enabled: true + Head Diameter: 0.30000001192092896 + Head Length: 0.20000000298023224 + Length: 0.30000001192092896 + Line Style: Billboards + Line Width: 0.029999999329447746 + Name: Supervision Graph + Offset: + X: 0 + Y: 0 + Z: 0 + Pose Color: 255; 85; 255 + Pose Style: None + Queue Size: 10 + Radius: 0.029999999329447746 + Shaft Diameter: 0.10000000149011612 + Shaft Length: 0.10000000149011612 + Topic: /wild_visual_navigation_node/supervision_graph + Unreliable: false + Value: true + - Alpha: 1 + Buffer Length: 1 + Class: rviz/Path + Color: 25; 255; 0 + Enabled: true + Head Diameter: 0.30000001192092896 + Head Length: 0.20000000298023224 + Length: 0.30000001192092896 + Line Style: Billboards + Line Width: 0.029999999329447746 + Name: Mission Graph + Offset: + X: 0 + Y: 0 + Z: 0 + Pose Color: 255; 85; 255 + Pose Style: Axes + Queue Size: 10 + Radius: 0.029999999329447746 + Shaft Diameter: 0.10000000149011612 + Shaft Length: 0.10000000149011612 + Topic: /wild_visual_navigation_node/mission_graph + Unreliable: false + Value: true + Enabled: true + Name: Self Supervision + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_image_traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Traversability + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_image_confidence + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Uncertainty + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Alpha: 1 + Buffer Length: 1 + Class: rviz/Path + Color: 136; 138; 133 + Enabled: true + Head Diameter: 0.30000001192092896 + Head Length: 0.20000000298023224 + Length: 0.10000000149011612 + Line Style: Billboards + Line Width: 0.05000000074505806 + Name: Mission Graph + Offset: + X: 0 + Y: 0 + Z: 0 + Pose Color: 255; 85; 255 + Pose Style: Axes + Queue Size: 10 + Radius: 0.009999999776482582 + Shaft Diameter: 0.10000000149011612 + Shaft Length: 0.10000000149011612 + Topic: /wild_visual_navigation_node/mission_graph + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_mask + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Reprojected Path + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_labeled + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Supervision Signal + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + Enabled: false + Name: Self Supervision 
(Debug) + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: true + Image Topic: /wide_angle_camera_front/image_color_rect_resize + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: AB Wide Angle Front + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wide_angle_camera_rear/image_color_rect_resize + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: AB Wide Rear Front + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: false + Image Topic: /hdr_camera_resize/image_raw + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: HDR + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + Enabled: true + Name: Cameras Resized + - Class: rviz/Group + Displays: + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-front + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_front_upper/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-rear + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_rear_upper/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-left + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_left/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 21.55472183227539 + Min Value: -7.581119537353516 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: FlatColor + Decay Time: 0 + Enabled: true + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: realsense-right + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.009999999776482582 + Style: Squares + Topic: /depth_camera_right/point_cloud_self_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + 
Value: true + - Alpha: 1 + Autocompute Intensity Bounds: true + Autocompute Value Bounds: + Max Value: 4.33540153503418 + Min Value: -0.6894725561141968 + Value: true + Axis: Z + Channel Name: intensity + Class: rviz/PointCloud2 + Color: 239; 41; 41 + Color Transformer: AxisColor + Decay Time: 0 + Enabled: false + Invert Rainbow: false + Max Color: 255; 255; 255 + Min Color: 0; 0; 0 + Name: velodyne + Position Transformer: XYZ + Queue Size: 10 + Selectable: true + Size (Pixels): 3 + Size (m): 0.019999999552965164 + Style: Squares + Topic: /point_cloud_filter/lidar/point_cloud_filtered + Unreliable: false + Use Fixed Frame: true + Use rainbow: true + Value: false + Enabled: false + Name: Depth Sensors + - Class: rviz/Group + Displays: + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/front/image_input + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Input Image + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/front/traversability + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Traversability Raw + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: false + Image Topic: /wild_visual_navigation_node/front/debug/last_node_image_overlay + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: Learning Mask + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: false + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_visu_traversability_front/traversability_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Traversability Overlay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_visu_traversability_rear/traversability_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Traversability Overlay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_visu_confidence_front/confidence_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: F Confidence Overlay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + - Class: rviz/Image + Enabled: true + Image Topic: /wild_visual_navigation_visu_confidence_rear/confidence_overlayed + Max Value: 1 + Median window: 5 + Min Value: 0 + Name: R Confidence Overlay + Normalize Range: true + Queue Size: 2 + Transport Hint: raw + Unreliable: false + Value: true + Enabled: true + Name: Prediction + - Class: rviz/Group + Displays: + - Class: rviz/Marker + Enabled: false + Marker Topic: /field_local_planner/real_carrot + Name: Goal (carrot) + Namespaces: + {} + Queue Size: 100 + Value: false + - Alpha: 1 + Axes Length: 1 + Axes Radius: 0.10000000149011612 + Class: rviz/PoseWithCovariance + Color: 255; 25; 0 + Covariance: + Orientation: + Alpha: 0.5 + Color: 255; 255; 127 + Color Style: Unique + Frame: Local + Offset: 1 + Scale: 1 + Value: true + Position: + Alpha: 0.30000001192092896 + Color: 204; 51; 204 + Scale: 1 + Value: true + Value: true + Enabled: false + Head Length: 0.30000001192092896 + Head Radius: 0.10000000149011612 + Name: Goal (pose) + Queue Size: 10 + Shaft Length: 1 + Shaft Radius: 0.05000000074505806 + Shape: 
Arrow + Topic: /field_local_planner/current_goal + Unreliable: false + Value: false + Enabled: true + Name: Local planner + Enabled: true + Name: Wild Visual Navigation + - Class: rviz/Group + Displays: + - Alpha: 1 + Autocompute Intensity Bounds: true + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: elevation + Color Transformer: GridMapLayer + ColorMap: terrain + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: false + Max Color: 255; 255; 255 + Max Intensity: 10 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Elevation + Show Grid Lines: true + Topic: /elevation_mapping/semantic_map + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: visual_traversability + Color Transformer: "" + ColorMap: coolwarm + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation_with_semantics + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Visual Traversability + Show Grid Lines: false + Topic: /elevation_mapping/semantic_map + Unreliable: false + Use ColorMap: true + Value: false + - Alpha: 1 + Autocompute Intensity Bounds: false + Class: grid_map_rviz_plugin/GridMap + Color: 200; 200; 200 + Color Layer: traversability + Color Transformer: "" + ColorMap: coolwarm + Enabled: false + Grid Cell Decimation: 1 + Grid Line Thickness: 0.10000000149011612 + Height Layer: elevation + Height Transformer: GridMapLayer + History Length: 1 + Invert ColorMap: true + Max Color: 255; 255; 255 + Max Intensity: 1 + Min Color: 0; 0; 0 + Min Intensity: 0 + Name: Geom Traversability + Show Grid Lines: true + Topic: /elevation_mapping/semantic_map + Unreliable: false + Use ColorMap: true + Value: false + Enabled: true + Name: GridMaps + Enabled: true + Global Options: + Background Color: 255; 255; 255 + Default Light: true + Fixed Frame: odom + Frame Rate: 30 + Name: root + Tools: + - Class: rviz/Interact + Hide Inactive Objects: true + - Class: rviz/MoveCamera + - Class: rviz/Select + - Class: rviz/FocusCamera + - Class: rviz/Measure + - Class: rviz/SetInitialPose + Theta std deviation: 0.2617993950843811 + Topic: /initialpose + X std deviation: 0.5 + Y std deviation: 0.5 + - Class: rviz/SetGoal + Topic: /move_base_simple/goal + - Class: rviz/PublishPoint + Single click: true + Topic: /clicked_point + Value: true + Views: + Current: + Class: rviz/ThirdPersonFollower + Distance: 13.404160499572754 + Enable Stereo Rendering: + Stereo Eye Separation: 0.05999999865889549 + Stereo Focal Distance: 1 + Swap Stereo Eyes: false + Value: false + Field of View: 0.7853981852531433 + Focal Point: + X: 2.1661949157714844 + Y: -0.48903560638427734 + Z: 0 + Focal Shape Fixed Size: true + Focal Shape Size: 0.05000000074505806 + Invert Z Axis: false + Name: Current View + Near Clip Distance: 0.009999999776482582 + Pitch: 0.4953981041908264 + Target Frame: base + Yaw: 0.4853982925415039 + Saved: ~ +Window Geometry: + AB Wide Angle Front: + collapsed: false + AB Wide Rear Front: + collapsed: false + Displays: + collapsed: false + F Confidence Overlay: + collapsed: false + F Traversability Overlay: + collapsed: false + HDR: + collapsed: false + Height: 1376 + Hide Left Dock: false + 
Hide Right Dock: false + Input Image: + collapsed: false + Learning Mask: + collapsed: false + QMainWindow State: 000000ff00000000fd0000000400000000000001a40000039cfc0200000012fb0000001200530065006c0065006300740069006f006e00000001e10000009b0000005c00fffffffb0000001e0054006f006f006c002000500072006f007000650072007400690065007302000005fb000001df00000185000000a3fb000000120056006900650077007300200054006f006f02000001df000002110000018500000122fb000000200054006f006f006c002000500072006f0070006500720074006900650073003203000002880000011d000002210000017afb000000100044006900730070006c00610079007301000001a70000039c000000c900fffffffb0000002000730065006c0065006300740069006f006e00200062007500660066006500720200000138000000aa0000023a00000294fb00000014005700690064006500530074006500720065006f02000000e6000000d2000003ee0000030bfb0000000c004b0069006e0065006300740200000186000001060000030c00000261fb0000000c00430061006d00650072006100000002ea000000c80000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640000000259000000520000000000000000fb00000024005300750070006500720076006900730069006f006e0020002d0020004d00610073006b000000027e000000b80000000000000000fb0000000a0049006d00610067006500000002eb000001280000000000000000fb000000140043006f006e0066006900640065006e0063006500000003b3000000c60000000000000000fb0000003e004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790020005200650070006c0061007900000004a40000009f0000000000000000fb00000028004600200054007200610076006500720073006100620069006c00690074007900200052006100770000000482000000c10000000000000000fb00000028004600200054007200610076006500720073006100620069006c00690074007900200052006100770000000482000000c10000000000000000fb0000000a0049006d0061006700650000000249000000210000000000000000fb0000000a0049006d0061006700650000000270000000200000000000000000000000010000010f0000039cfc0200000003fb0000001e0054006f006f006c002000500072006f00700065007200740069006500730100000041000000780000000000000000fb0000000a0056006900650077007301000001a70000039c000000a400fffffffb0000001200530065006c0065006300740069006f006e010000025a000000b2000000000000000000000002000009b800000164fc0100000015fb0000001c0049006d00610067006500200046006500610074007500720065007300000000bf000003690000000000000000fb000000060048004400520000000000000000950000005400fffffffb00000024004100420020005700690064006500200052006500610072002000460072006f006e00740100000000000001d6000000ba00fffffffc0000012a0000010c0000000000fffffffa000000000200000003fb0000000a0049006d0061006700650000000000ffffffff0000000000000000fb0000002400460072006f006e0074002000280063006f006d007000720065007300730065006400290000000000ffffffff0000000000000000fb0000001e00410053002000460072006f006e00740020002800630061006d00340029000000003d0000010c0000000000000000fc000002570000019b0000000000fffffffa000000000200000001fb0000000a0049006d0061006700650000000000ffffffff0000000000000000fc0000036f000001530000000000fffffffa000000010200000003fb000000160049006e00700075007400200049006d006100670065000000003d000001ac0000001600fffffffb0000001a005200200049006e00700075007400200049006d0061006700650000000000ffffffff0000000000000000fb0000001a004c00200049006e00700075007400200049006d0061006700650000000000ffffffff0000000000000000fb00000028005200200043006f006e0066006900640065006e006300650020004f007600650072006c0061007901000001dc000001ae000000c700fffffffb00000030005200200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c0061007901000003900000016e000000d500fff
ffffb00000026004100420020005700690064006500200041006e0067006c0065002000460072006f006e00740100000504000001b7000000c200fffffffb00000028004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790100000709000001540000000000000000fb00000030004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c0061007901000006c100000147000000d400fffffffb00000028004600200043006f006e0066006900640065006e006300650020004f007600650072006c00610079010000080e000001aa000000c600fffffffb0000000a0049006d00610067006501000003240000018a0000000000000000fb0000002c0054007200610076006500720073006100620069006c0069007400790020004f007600650072006c0061007900000004ae0000010c0000000000000000fc00000846000000bd0000000000fffffffa000000000200000006fb000000160046002000540072006100760020004f0076006500720000000000ffffffff0000000000000000fb0000003e004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790020005200650070006c006100790100000000ffffffff0000000000000000fb000000240054007200610076006500720073006100620069006c0069007400790020005200610077000000003d0000012b0000001600fffffffb00000028005200200054007200610076006500720073006100620069006c00690074007900200052006100770000000000ffffffff0000000000000000fb00000028004c00200054007200610076006500720073006100620069006c00690074007900200052006100770000000000ffffffff0000000000000000fb00000030004600200054007200610076006500720073006100620069006c0069007400790020004f007600650072006c006100790000000000ffffffff0000000000000000fc00000909000000af0000000000fffffffa000000000200000005fb00000036004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790020005200650070006c006100790100000000ffffffff0000000000000000fb0000001a004c006500610072006e0069006e00670020004d00610073006b000000003d0000012b0000001600fffffffb00000020005200200043006f006e0066006900640065006e0063006500200052006100770000000000ffffffff0000000000000000fb00000020004c00200043006f006e0066006900640065006e0063006500200052006100770000000000ffffffff0000000000000000fb00000028004600200043006f006e0066006900640065006e006300650020004f007600650072006c006100790000000000ffffffff0000000000000000fb000000160050006c0061006300650068006f006c00640065007200000000000000074a0000000000000000fb000000160049006e00700075007400200049006d006100670065010000031e000001790000000000000000fb00000036005300750070006500720076006900730069006f006e0020002d00200049006d0061006700650020004c006100620065006c006500640100000659000001af0000000000000000fb00000024005300750070006500720076006900730069006f006e0020002d0020004d00610073006b010000080e000001aa0000000000000000fb0000000a00560069006500770073030000004e00000080000002e100000197000000030000074a00000042fc010000000dfb000000200052006500700072006f006a00650063007400650064002000500061007400680000000000000000ab000000ab00fffffffb00000024005300750070006500720076006900730069006f006e0020005300690067006e0061006c00000000b10000056e000000b000fffffffb0000001c0054007200610076006500720073006100620069006c00690074007900000006250000009a0000009100fffffffb000000160055006e006300650072007400610069006e0074007900000006c5000000850000008500fffffffb000000160050006c0061006300650068006f006c006400650072000000000000000eb80000000000000000fb0000001c0054007200610076006500720073006100620069006c006900740079010000031f0000017d0000000000000000fb000000160055006e006300650072007400610069006e0074007901000004a2000002960000000000000000fb000000140043006f006e0066006900640065006e006300650000000000000003d40000000000000000fb0000001c0054007200610076006500720073006100620069006c0069007400790000000231
000001f20000000000000000fb0000001c004100530020004c0065006600740020002800630061006d003300290000000000000007380000000000000000fb0000001e004100530020005200690067006800740020002800630061006d0035002900000003f0000003480000000000000000fb0000000800540069006d00650000000000000007800000041800fffffffb0000000800540069006d00650100000000000004500000000000000000000006f90000039c00000004000000040000000800000008fc0000000100000002000000010000000a0054006f006f006c00730100000000ffffffff0000000000000000 + R Confidence Overlay: + collapsed: false + R Traversability Overlay: + collapsed: false + Reprojected Path: + collapsed: false + Selection: + collapsed: false + Supervision Signal: + collapsed: false + Time: + collapsed: false + Tool Properties: + collapsed: false + Traversability: + collapsed: false + Traversability Raw: + collapsed: false + Uncertainty: + collapsed: false + Views: + collapsed: false + Width: 2488 + X: 72 + Y: 27 diff --git a/wild_visual_navigation_ros/config/wild_visual_navigation/default.yaml b/wild_visual_navigation_ros/config/wild_visual_navigation/default.yaml index 20032e13..29406bc3 100644 --- a/wild_visual_navigation_ros/config/wild_visual_navigation/default.yaml +++ b/wild_visual_navigation_ros/config/wild_visual_navigation/default.yaml @@ -1,79 +1,57 @@ # Input topics robot_state_topic: "/wild_visual_navigation_node/robot_state" -# desired_twist_topic: "/motion_reference/command_twist" -desired_twist_topic: "/log/state/desiredRobotTwist" - -camera_topics: - front: - image_topic: "/alphasense_driver_ros/cam4/debayered" - info_topic: "/alphasense_driver_ros/cam4/camera_info" - use_for_training: true - # left: - # image_topic: "/alphasense_driver_ros/cam3/debayered" - # info_topic: "/alphasense_driver_ros/cam3/camera_info" - # use_for_training: false - # right: - # image_topic: "/alphasense_driver_ros/cam5/debayered" - # info_topic: "/alphasense_driver_ros/cam5/camera_info" - # use_for_training: false - +desired_twist_topic: "/motion_reference/command_twist" # Relevant frames fixed_frame: odom base_frame: base footprint_frame: footprint -# Robot size -robot_length: 1.0 -robot_width: 0.6 +# Robot size used to project the footprint +robot_length: 0.8 +robot_width: 0.3 robot_height: 0.3 -# Robot specs -robot_max_velocity: 0.8 - -# Traversability estimation params -traversability_radius: 3.0 # meters -image_graph_dist_thr: 0.2 # meters -proprio_graph_dist_thr: 0.1 # meters -network_input_image_height: 224 # 448 -network_input_image_width: 224 # 448 -segmentation_type: "slic" -feature_type: "dino" -dino_patch_size: 16 # DINO only -confidence_std_factor: 1.0 -scale_traversability: True -scale_traversability_max_fpr: 0.15 -min_samples_for_training: 5 - -# Optical flow params -optical_flow_estimator_type: "none" +# Traversability estimation parameters +traversability_radius: 3.0 # Maximum distance in meters of the supervision graph +image_graph_dist_thr: 0.2 # Minimum distance between mission nodes being added +supervision_graph_dist_thr: 0.1 # Minimum distance between supervision nodes being added +network_input_image_height: 224 # Input image height +network_input_image_width: 224 # Input image width +segmentation_type: "stego" # Options: slic, grid, random, stego +feature_type: "stego" # Options: dino, dinov2, stego +dino_patch_size: 8 # Options: 8, 16; we found 8 to be sufficient and faster +dino_backbone: vit_small # Options: vit_small +slic_num_components: 100 # Number of segments for slic +confidence_std_factor: 1.0 # Tuning parameter for the confidence computation, explained in 
confidence_generator.py +min_samples_for_training: 5 # Minimum number of mission nodes with successful reprojection before training starts +prediction_per_pixel: True # If true, the trained network is evaluated per pixel, otherwise per segment +vis_node_index: 10 # Defines the node used for visualization; can help to debug the projected footprint + # 1 -> the latest added mission node; 10 -> the node added 10 steps in the past # Supervision Generator -untraversable_thr: 0.01 - -mission_name: "test" -mission_timestamp: True +untraversable_thr: 0.0 # Threshold for untraversable areas; used to spawn special "collision walls" (not used in the paper) +mission_name: "test" # Name of the mission_folder used for logging +mission_timestamp: True # If true, the current timestamp is used as a prefix for the mission_folder # Threads -image_callback_rate: 3 # hertz -proprio_callback_rate: 4 # hertz -learning_thread_rate: 10 # hertz -logging_thread_rate: 2 # hertz +image_callback_rate: 10 # Maximum rate at which images are processed in hertz +supervision_callback_rate: 10 # Maximum rate at which supervision_signals are processed in hertz +learning_thread_rate: 10 # Gradient steps per second in hertz +logging_thread_rate: 2 # Logging rate of the learning_node in hertz +status_thread_rate: 0.5 # Status publishing rate of the feature_extractor_node in hertz +load_save_checkpoint_rate: 1.0 # Rate at which checkpoints are saved / loaded between learning and feature extraction + # Runtime options device: "cuda" -mode: "online" # check out comments in the class WVNMode +mode: "online" # Options: "online", "debug", "extract_labels"; sets the operation mode of both nodes colormap: "RdYlBu" print_image_callback_time: false -print_proprio_callback_time: false -log_time: true -log_memory: true -log_confidence: true -verbose: false -debug_supervision_node_index_from_last: 10 - -use_debug_for_desired: true -extraction_store_folder: "nan" -exp: "nan" +print_supervision_callback_time: false +log_time: false +log_confidence: false +verbose: true +extraction_store_folder: "nan" \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/alphasense.yaml b/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/alphasense.yaml deleted file mode 100644 index 6eb15dde..00000000 --- a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/alphasense.yaml +++ /dev/null @@ -1,17 +0,0 @@ -camera_topics: - front: - image_topic: "/alphasense_driver_ros/cam4/debayered" - info_topic: "/alphasense_driver_ros/cam4/camera_info" - use_for_training: true - # left: - # image_topic: "/alphasense_driver_ros/cam3/debayered" - # info_topic: "/alphasense_driver_ros/cam3/camera_info" - # use_for_training: false - # right: - # image_topic: "/alphasense_driver_ros/cam5/debayered" - # info_topic: "/alphasense_driver_ros/cam5/camera_info" - # use_for_training: false - -# Provides 1080 (height) x 1920 (width) images -network_input_image_height: 224 # 448 -network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/alphasense_compressed.yaml b/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/alphasense_compressed.yaml deleted file mode 100644 index b8af1e27..00000000 --- a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/alphasense_compressed.yaml +++ /dev/null @@ -1,17 +0,0 @@ -camera_topics: - front: - image_topic: "/alphasense_driver_ros/cam4/debayered/compressed" - 
info_topic: "/alphasense_driver_ros/cam4/camera_info" - use_for_training: true - # left: - # image_topic: "/alphasense_driver_ros/cam3/debayered" - # info_topic: "/alphasense_driver_ros/cam3/camera_info" - # use_for_training: false - # right: - # image_topic: "/alphasense_driver_ros/cam5/debayered" - # info_topic: "/alphasense_driver_ros/cam5/camera_info" - # use_for_training: false - -# Provides 1080 (height) x 1920 (width) images -network_input_image_height: 224 # 448 -network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/wide_angle_front.yaml b/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/wide_angle_front.yaml deleted file mode 100644 index 970bcf6d..00000000 --- a/wild_visual_navigation_ros/config/wild_visual_navigation/inputs/wide_angle_front.yaml +++ /dev/null @@ -1,9 +0,0 @@ -camera_topics: - front: - image_topic: "/wide_angle_camera_front/image_color" - info_topic: "/wide_angle_camera_front/camera_info" - use_for_training: true - -# Provides 1080 (height) x 1920 (width) images -network_input_image_height: 224 # 448 -network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/wild_visual_navigation/resized_all.yaml b/wild_visual_navigation_ros/config/wild_visual_navigation/resized_all.yaml new file mode 100644 index 00000000..ffd199db --- /dev/null +++ b/wild_visual_navigation_ros/config/wild_visual_navigation/resized_all.yaml @@ -0,0 +1,26 @@ +camera_topics: + front: + image_topic: "/wide_angle_camera_front/image_color_rect_resize" + info_topic: "/wide_angle_camera_front_resize/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + rear: + image_topic: "/wide_angle_camera_rear/image_color_rect_resize" + info_topic: "/wide_angle_camera_rear_resize/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + + hdr: + image_topic: "/hdr_camera_resize/image_raw" + info_topic: "/hdr_camera_resize/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 224 # 448 +network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_ros/config/wild_visual_navigation/resized_wide_angle_dual.yaml b/wild_visual_navigation_ros/config/wild_visual_navigation/resized_wide_angle_dual.yaml new file mode 100644 index 00000000..ace08bcc --- /dev/null +++ b/wild_visual_navigation_ros/config/wild_visual_navigation/resized_wide_angle_dual.yaml @@ -0,0 +1,19 @@ +camera_topics: + front: + image_topic: "/wide_angle_camera_front/image_color_rect_resize" + info_topic: "/wide_angle_camera_front_resize/camera_info" + use_for_training: false + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + rear: + image_topic: "/wide_angle_camera_rear/image_color_rect_resize" + info_topic: "/wide_angle_camera_rear_resize/camera_info" + use_for_training: true + publish_confidence: true + publish_input_image: true + scheduler_weight: 1 + +# Provides 1080 (height) x 1920 (width) images +network_input_image_height: 224 # 448 +network_input_image_width: 224 # 448 \ No newline at end of file diff --git a/wild_visual_navigation_ros/launch/elevation_mapping_cupy.launch b/wild_visual_navigation_ros/launch/elevation_mapping_cupy.launch index f741481c..db6a5619 100644 
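The new resized_all.yaml and resized_wide_angle_dual.yaml files above collect every camera under a single camera_topics dictionary, with per-camera flags for training use, confidence and input-image publishing, and a scheduler weight. As a rough sketch of how such a block can be consumed (the file path, the loop, and the printed summary are illustrative assumptions, not the package's actual loading code):

import yaml

# Hypothetical inspection of one of the camera configs shown above.
with open("wild_visual_navigation_ros/config/wild_visual_navigation/resized_wide_angle_dual.yaml") as f:
    cfg = yaml.safe_load(f)

for name, cam in cfg["camera_topics"].items():
    role = "training + inference" if cam.get("use_for_training") else "inference only"
    print(f"{name}: {cam['image_topic']} ({role}, scheduler_weight={cam.get('scheduler_weight', 1)})")

In the nodes themselves the same structure is presumably read from the ROS parameter server rather than from the file directly.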
--- a/wild_visual_navigation_ros/launch/elevation_mapping_cupy.launch +++ b/wild_visual_navigation_ros/launch/elevation_mapping_cupy.launch @@ -2,8 +2,8 @@ - - + + \ No newline at end of file diff --git a/wild_visual_navigation_ros/launch/overlay_images.launch b/wild_visual_navigation_ros/launch/overlay_images.launch index 6d8c0288..39db6505 100644 --- a/wild_visual_navigation_ros/launch/overlay_images.launch +++ b/wild_visual_navigation_ros/launch/overlay_images.launch @@ -1,12 +1,23 @@ - + - + + + + + + + + + + + + \ No newline at end of file diff --git a/wild_visual_navigation_ros/launch/record_rosbag_debug.launch b/wild_visual_navigation_ros/launch/record_rosbag_debug.launch deleted file mode 100644 index 4f3b9549..00000000 --- a/wild_visual_navigation_ros/launch/record_rosbag_debug.launch +++ /dev/null @@ -1,95 +0,0 @@ - - - - - - - - - - diff --git a/wild_visual_navigation_ros/launch/robot.launch b/wild_visual_navigation_ros/launch/robot.launch deleted file mode 100644 index 9eb92dff..00000000 --- a/wild_visual_navigation_ros/launch/robot.launch +++ /dev/null @@ -1,16 +0,0 @@ - - - - - - - - - - - - - - - - diff --git a/wild_visual_navigation_ros/launch/smart_carrot.launch b/wild_visual_navigation_ros/launch/smart_carrot.launch new file mode 100644 index 00000000..75655e21 --- /dev/null +++ b/wild_visual_navigation_ros/launch/smart_carrot.launch @@ -0,0 +1,16 @@ + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/wild_visual_navigation_ros/launch/view.launch b/wild_visual_navigation_ros/launch/view.launch index 6fd0912e..ade9d973 100644 --- a/wild_visual_navigation_ros/launch/view.launch +++ b/wild_visual_navigation_ros/launch/view.launch @@ -1,5 +1,5 @@ - + diff --git a/wild_visual_navigation_ros/launch/wild_visual_navigation.launch b/wild_visual_navigation_ros/launch/wild_visual_navigation.launch index cd75f09c..0e7f8042 100644 --- a/wild_visual_navigation_ros/launch/wild_visual_navigation.launch +++ b/wild_visual_navigation_ros/launch/wild_visual_navigation.launch @@ -1,22 +1,28 @@ - - + - + + + + - - - + + + + - - - + + + + + + - + diff --git a/wild_visual_navigation_ros/scripts/histogram.py b/wild_visual_navigation_ros/scripts/histogram.py deleted file mode 100644 index 2f7b09b3..00000000 --- a/wild_visual_navigation_ros/scripts/histogram.py +++ /dev/null @@ -1,29 +0,0 @@ -import rospy -import cv2 -import matplotlib.pyplot as plt -from cv_bridge import CvBridge -from sensor_msgs.msg import Image -import numpy as np - - -class HistogramNode: - def __init__(self): - self.bridge = CvBridge() - self.image_sub = rospy.Subscriber("/wild_visual_navigation_node/front/traversability", Image, self.callback) - - def callback(self, msg): - # Convert the ROS Image message to a numpy array - img = self.bridge.imgmsg_to_cv2(msg, desired_encoding="passthrough") - - # Compute the histogram of the image - hist = np.histogram(img.flatten(), bins=100) - - # Plot the histogram using matplotlib - plt.hist(img, bins=100, range=[0, 1]) - plt.savefig("histogram.png") - - -if __name__ == "__main__": - rospy.init_node("histogram_node") - node = HistogramNode() - rospy.spin() diff --git a/wild_visual_navigation_ros/scripts/overlay_images.py b/wild_visual_navigation_ros/scripts/overlay_images.py index 943cfc36..b5c32371 100644 --- a/wild_visual_navigation_ros/scripts/overlay_images.py +++ b/wild_visual_navigation_ros/scripts/overlay_images.py @@ -1,6 +1,11 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. 
+# See LICENSE file in the project root for details. +# import message_filters import rospy -from sensor_msgs.msg import Image, CameraInfo, CompressedImage +from sensor_msgs.msg import Image # , CameraInfo # , CompressedImage import wild_visual_navigation_ros.ros_converter as rc from wild_visual_navigation.visu import LearningVisualizer import sys @@ -12,8 +17,9 @@ def __init__(self): self.value_sub_topic = rospy.get_param("~value_sub_topic") self.image_pub_topic = rospy.get_param("~image_pub_topic") - self.input_pub = rospy.Publisher(f"~{self.image_pub_topic}", Image, queue_size=10) + self._pub = rospy.Publisher(f"~{self.image_pub_topic}", Image, queue_size=1) self._visualizer = LearningVisualizer() + image_sub = message_filters.Subscriber(self.image_sub_topic, Image) trav_sub = message_filters.Subscriber(self.value_sub_topic, Image) sync = message_filters.ApproximateTimeSynchronizer([image_sub, trav_sub], queue_size=2, slop=0.5) @@ -23,15 +29,22 @@ def callback(self, image_msg, trav_msgs): torch_image = rc.ros_image_to_torch(image_msg, device="cpu") torch_trav = rc.ros_image_to_torch(trav_msgs, device="cpu", desired_encoding="passthrough") img_out = self._visualizer.plot_detectron_classification(torch_image, torch_trav.clip(0, 1)) - self.input_pub.publish(rc.numpy_to_ros_image(img_out)) + ros_msg = rc.numpy_to_ros_image(img_out) + ros_msg.header.stamp = image_msg.header.stamp + self._pub.publish(ros_msg) if __name__ == "__main__": print("Stated ImageOverlayNode") try: - nr = "_" + rospy.myargv(argv=sys.argv)[-1].split(" ")[-1] - rospy.init_node(f"wild_visual_navigation_visu{nr}") - except: + args = rospy.myargv(argv=sys.argv) + if "--nr" in args: + nr_index = args.index("--nr") + nr = args[nr_index + 1] + else: + nr = "0" # Handle case when no arg is set + rospy.init_node(f"wild_visual_navigation_visu_{nr}") + except Exception: rospy.init_node("wild_visual_navigation_visu") wvn = ImageOverlayNode() diff --git a/wild_visual_navigation_ros/scripts/path_length.py b/wild_visual_navigation_ros/scripts/path_length.py deleted file mode 100644 index 85065225..00000000 --- a/wild_visual_navigation_ros/scripts/path_length.py +++ /dev/null @@ -1,24 +0,0 @@ -import rospy -from nav_msgs.msg import Path - -import numpy as np - - -def callback(data, args): - (topic_name,) = args - path_length = 0 - for i in range(len(data.poses) - 1): - p1 = np.array([data.poses[i].pose.position.x, data.poses[i].pose.position.y, data.poses[i].pose.position.z]) - p2 = np.array( - [data.poses[i + 1].pose.position.x, data.poses[i + 1].pose.position.y, data.poses[i + 1].pose.position.z] - ) - path_length += np.linalg.norm(p2 - p1) - print("Received path on topic {} with length {} meters".format(topic_name, path_length)) - - -if __name__ == "__main__": - rospy.init_node("path_subscriber") - topic_names = ["/gps_trajectory1", "/gps_trajectory2", "/gps_trajectory3", "/gps_trajectory4", "/gps_trajectory5"] - for topic_name in topic_names: - rospy.Subscriber(topic_name, Path, callback, callback_args=(topic_name,)) - rospy.spin() diff --git a/wild_visual_navigation_ros/scripts/pickle_traversability_estimator.py b/wild_visual_navigation_ros/scripts/pickle_traversability_estimator.py deleted file mode 100755 index 6fa74bc5..00000000 --- a/wild_visual_navigation_ros/scripts/pickle_traversability_estimator.py +++ /dev/null @@ -1,24 +0,0 @@ -#!/usr/bin/python3 -from wild_visual_navigation.traversability_estimator import TraversabilityEstimator -from os.path import exists -import argparse -import torch - -torch.cuda.empty_cache() - 
-if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Interface to debug traversability estimator training via pickle") - parser.add_argument("--pickle_file", type=str, help="Pickle file path") - parser.add_argument("--device", type=str, help="device where to load the traversability estimator", default="cuda") - args = parser.parse_args() - - # Check if file exists - if not exists(args.pickle_file): - raise ValueError(f"Argument pickle file [{args.pickle_file}] doesn't exist") - - te = TraversabilityEstimator.load(args.pickle_file, device=args.device) - - # Make training thread - while True: - print("New iteration") - te.train() diff --git a/wild_visual_navigation_ros/scripts/ros_visualizer.py b/wild_visual_navigation_ros/scripts/ros_visualizer.py deleted file mode 100644 index 5b1d3e50..00000000 --- a/wild_visual_navigation_ros/scripts/ros_visualizer.py +++ /dev/null @@ -1,140 +0,0 @@ -import rospy -import tf2_ros -from sensor_msgs.msg import PointCloud2, CameraInfo, Image, Image -from grid_map_msgs.msg import GridMap, GridMapInfo -from std_msgs.msg import Float32MultiArray, MultiArrayDimension -from geometry_msgs.msg import TransformStamped -import numpy as np -import ros_numpy - - -class SimpleNumpyToRviz: - def __init__(self, init_node=True, postfix=""): - rospy.init_node("numpy_to_rviz", anonymous=False) - self.pub_pointcloud = rospy.Publisher(f"~pointcloud{postfix}", PointCloud2, queue_size=1) - self.pub_gridmap = rospy.Publisher(f"~gridmap{postfix}", GridMap, queue_size=1) - self.pub_image = rospy.Publisher(f"~image{postfix}", Image, queue_size=1) - self.pub_camera_info = rospy.Publisher(f"~camera_info{postfix}", CameraInfo, queue_size=1) - self.br = tf2_ros.TransformBroadcaster() - - def camera_info(self, camera_info): - pass - - def tf(self, msg, reference_frame="crl_rzr/map"): - t = TransformStamped() - t.header.stamp = rospy.Time.now() - t.header.frame_id = reference_frame - t.child_frame_id = msg[3] - t.transform.translation.x = msg[4][0] - t.transform.translation.y = msg[4][1] - t.transform.translation.z = msg[4][2] - t.transform.rotation.x = msg[5][0] - t.transform.rotation.y = msg[5][1] - t.transform.rotation.z = msg[5][2] - t.transform.rotation.w = msg[5][3] - self.br.sendTransform(t) - - def image(self, img): - pass - - def pointcloud(self, points, reference_frame="sensor_gravity"): - data = np.zeros(points.shape[0], dtype=[("x", np.float32), ("y", np.float32), ("z", np.float32)]) - data["x"] = points[:, 0] - data["y"] = points[:, 1] - data["z"] = points[:, 2] - msg = ros_numpy.msgify(PointCloud2, data) - - msg.header.frame_id = reference_frame - - self.pub_pointcloud.publish(msg) - - def gridmap(self, msg, publish=True): - data_in = msg[0] - - size_x = data_in["data"].shape[1] - size_y = data_in["data"].shape[2] - - data_dim_0 = MultiArrayDimension() - data_dim_0.label = "column_index" # y dimension - data_dim_0.size = size_y # number of columns which is y - data_dim_0.stride = size_y * size_x # rows*cols - data_dim_1 = MultiArrayDimension() - data_dim_1.label = "row_index" # x dimension - data_dim_1.size = size_x # number of rows which is x - data_dim_1.stride = size_x # number of rows - layers = [] - data = [] - - for i in range(data_in["data"].shape[0]): - data_tmp = Float32MultiArray() - data_tmp.layout.dim.append(data_dim_0) - data_tmp.layout.dim.append(data_dim_1) - data_tmp.data = data_in["data"][i, ::-1, ::-1].transpose().ravel() - data.append(data_tmp) - - info = GridMapInfo() - info.pose.position.x = data_in["position"][0] - 
info.pose.position.y = data_in["position"][1] - info.pose.position.z = data_in["position"][2] - info.pose.orientation.x = data_in["orientation_xyzw"][0] - info.pose.orientation.y = data_in["orientation_xyzw"][1] - info.pose.orientation.z = data_in["orientation_xyzw"][2] - info.pose.orientation.z = data_in["orientation_xyzw"][3] - info.header.seq = msg[1] - # info.header.stamp.secs = msg[2]vis - info.header.stamp = rospy.Time.now() - info.resolution = data_in["resolution"] - info.length_x = size_x * data_in["resolution"] - info.length_y = size_y * data_in["resolution"] - - gm_msg = GridMap(info=info, layers=data_in["layers"], basic_layers=data_in["basic_layers"], data=data) - - if publish: - self.pub_gridmap.publish(gm_msg) - - return gm_msg - - def gridmap_arr(self, arr, res, layers, reference_frame="sensor_gravity", publish=True, x=0, y=0): - size_x = arr.shape[1] - size_y = arr.shape[2] - - data_dim_0 = MultiArrayDimension() - data_dim_0.label = "column_index" # y dimension - data_dim_0.size = size_y # number of columns which is y - data_dim_0.stride = size_y * size_x # rows*cols - data_dim_1 = MultiArrayDimension() - data_dim_1.label = "row_index" # x dimension - data_dim_1.size = size_x # number of rows which is x - data_dim_1.stride = size_x # number of rows - data = [] - - for i in range(arr.shape[0]): - data_tmp = Float32MultiArray() - data_tmp.layout.dim.append(data_dim_0) - data_tmp.layout.dim.append(data_dim_1) - data_tmp.data = arr[i, ::-1, ::-1].transpose().ravel() - data.append(data_tmp) - - info = GridMapInfo() - info.pose.orientation.w = 1 - info.header.seq = 0 - info.header.stamp = rospy.Time.now() - info.resolution = res - info.length_x = size_x * res - info.length_y = size_y * res - info.header.frame_id = reference_frame - info.pose.position.x = x - info.pose.position.y = y - gm_msg = GridMap(info=info, layers=layers, basic_layers=[], data=data) - if publish: - self.pub_gridmap.publish(gm_msg) - return gm_msg - - -if __name__ == "__main__": - import time - import pickle as pkl - from os.path import join - import torch - - vis = SimpleNumpyToRviz() diff --git a/wild_visual_navigation_ros/scripts/rosbag_play.sh b/wild_visual_navigation_ros/scripts/rosbag_play.sh index 902b2470..a6a39dad 100755 --- a/wild_visual_navigation_ros/scripts/rosbag_play.sh +++ b/wild_visual_navigation_ros/scripts/rosbag_play.sh @@ -4,37 +4,39 @@ args="" for option in "$@"; do if [ "$option" == "--sem" ]; then args="$args /elevation_mapping/elevation_map_raw:=/recorded/elevation_mapping/elevation_map_raw \ - /elevation_mapping/semantic_map_raw:=/recorded/elevation_mapping/semantic_map_raw" + /elevation_mapping/semantic_map:=/recorded/elevation_mapping/semantic_map" elif [ "$option" == "--wvn" ]; then - args="$args /wild_visual_navigation_node/front/camera_info:=/recorded_wvn/wild_visual_navigation_node/front/camera_info \ - /wild_visual_navigation_node/front/confidence:=/recorded_wvn/wild_visual_navigation_node/front/confidence \ - /wild_visual_navigation_node/front/image_input:=/recorded_wvn/wild_visual_navigation_node/front/image_input \ - /wild_visual_navigation_node/front/traversability:=/recorded_wvn/wild_visual_navigation_node/front/traversability \ - /wild_visual_navigation_node/graph_footprints:=/recorded_wvn/wild_visual_navigation_node/graph_footprints \ - /wild_visual_navigation_node/instant_traversability:=/recorded_wvn/wild_visual_navigation_node/instant_traversability \ - /wild_visual_navigation_node/proprioceptive_graph:=/recorded_wvn/wild_visual_navigation_node/proprioceptive_graph \ 
- /wild_visual_navigation_node/robot_state:=/recorded_wvn/wild_visual_navigation_node/robot_state \ - /wild_visual_navigation_node/system_state:=/recorded_wvn/wild_visual_navigation_node/system_state \ - /wild_visual_navigation_visu_confidence/confidence_overlayed:=/recorded_wvn/wild_visual_navigation_visu_confidence/confidence_overlayed \ - /wild_visual_navigation_visu_traversability/traversability_overlayed:=/recorded_wvn/wild_visual_navigation_visu_traversability/traversability_overlayed" + args="$args /wild_visual_navigation_node/front/camera_info:=/recorded/wild_visual_navigation_node/front/camera_info \ + /wild_visual_navigation_node/front/confidence:=/recorded/wild_visual_navigation_node/front/confidence \ + /wild_visual_navigation_node/front/feat:=/recorded/wild_visual_navigation_node/front/feat \ + /wild_visual_navigation_node/front/image_input:=/recorded/wild_visual_navigation_node/front/image_input \ + /wild_visual_navigation_node/front/traversability:=/recorded/wild_visual_navigation_node/front/traversability \ + /wild_visual_navigation_node/graph_footprints:=/recorded/wild_visual_navigation_node/graph_footprints \ + /wild_visual_navigation_node/graph_footprints_array:=/recorded/wild_visual_navigation_node/graph_footprints_array \ + /wild_visual_navigation_node/mission_graph:=/recorded/wild_visual_navigation_node/mission_graph \ + /wild_visual_navigation_node/rear/camera_info:=/recorded/wild_visual_navigation_node/rear/camera_info \ + /wild_visual_navigation_node/rear/confidence:=/recorded/wild_visual_navigation_node/rear/confidence \ + /wild_visual_navigation_node/rear/image_input:=/recorded/wild_visual_navigation_node/rear/image_input \ + /wild_visual_navigation_node/rear/traversability:=/recorded/wild_visual_navigation_node/rear/traversability \ + /wild_visual_navigation_node/supervision_graph:=/recorded/wild_visual_navigation_node/supervision_graph \ + /wild_visual_navigation_visu_front_trav/traversability:=/recorded/wild_visual_navigation_visu_front_trav/traversability \ + /wild_visual_navigation_visu_rear_trav/traversability:=/recorded/wild_visual_navigation_visu_rear_trav/traversability" elif [ "$option" == "--flp" ]; then - args="$args /field_local_planner/action_server/status:=/recorded_flp/field_local_planner/action_server/status \ - /field_local_planner/current_base:=/recorded_flp/field_local_planner/current_base \ - /field_local_planner/current_goal:=/recorded_flp/field_local_planner/current_goal \ - /field_local_planner/parameter_descriptions:=/recorded_flp/field_local_planner/parameter_descriptions \ - /field_local_planner/parameter_updates:=/recorded_flp/field_local_planner/parameter_updates \ - /field_local_planner/path:=/recorded_flp/field_local_planner/path \ - /field_local_planner/real_carrot:=/recorded_flp/field_local_planner/real_carrot \ - /field_local_planner/rmp/control_points:=/recorded_flp/field_local_planner/rmp/control_points \ - /field_local_planner/rmp/parameter_descriptions:=/recorded_flp/field_local_planner/rmp/parameter_descriptions \ - /field_local_planner/rmp/parameter_updates:=/recorded_flp/field_local_planner/rmp/parameter_updates \ - /field_local_planner/status:=/recorded_flp/field_local_planner/status \ - /elevation_mapping/elevation_map_wifi:=/recorded_flp/elevation_mapping/elevation_map_wifi" + args="$args /field_local_planner/action_server/status:=/recorded/field_local_planner/action_server/status \ + /field_local_planner/current_base:=/recorded/field_local_planner/current_base \ + 
/field_local_planner/current_goal:=/recorded/field_local_planner/current_goal \ + /field_local_planner/parameter_descriptions:=/recorded/field_local_planner/parameter_descriptions \ + /field_local_planner/parameter_updates:=/recorded/field_local_planner/parameter_updates \ + /field_local_planner/path:=/recorded/field_local_planner/path \ + /field_local_planner/real_carrot:=/recorded/field_local_planner/real_carrot \ + /field_local_planner/rmp/control_points:=/recorded/field_local_planner/rmp/control_points \ + /field_local_planner/rmp/parameter_descriptions:=/recorded/field_local_planner/rmp/parameter_descriptions \ + /field_local_planner/rmp/parameter_updates:=/recorded/field_local_planner/rmp/parameter_updates \ + /field_local_planner/status:=/recorded/field_local_planner/status \ + /elevation_mapping/elevation_map_wifi:=/recorded/elevation_mapping/elevation_map_wifi" elif [ "$option" == "--tf" ]; then - args="$args /tf:=/recorded_flp/tf" - - # /tf_static:=/recorded_flp/tf_static" - + args="$args /tf:=/recorded/tf" + # /tf_static:=/recorded/tf_static" echo "rosrun anymal_rsl_launch replay.py c /media/Data/Datasets/2023_Oxford_Testing/2023_01_27_Oxford_Park/mission_data/2023-01-27-11-00-22/2023-01-27-11-00-22_anymal-coyote-lpc_mission.yaml" else args="$args $option" diff --git a/wild_visual_navigation_ros/scripts/shift_gridmap.py b/wild_visual_navigation_ros/scripts/shift_gridmap.py deleted file mode 100644 index ea1efa79..00000000 --- a/wild_visual_navigation_ros/scripts/shift_gridmap.py +++ /dev/null @@ -1,56 +0,0 @@ -from grid_map_msgs.msg import GridMap, GridMapInfo -import rospy -import sys -import tf -from tf.transformations import euler_from_quaternion -import math -import numpy as np - - -class ShiftGridMapNode: - def __init__(self): - self.gridmap_sub_topic = rospy.get_param("~gridmap_sub_topic", "/recorded/elevation_map_wifi") - print("Subscribed", self.gridmap_sub_topic) - self.gridmap_pub_topic = rospy.get_param("~gridmap_pub_topic", "elevation_map_wifi_shifted") - self.offset_factor_x = rospy.get_param("~offset_factor_x") - self.offset_factor_y = rospy.get_param("~offset_factor_y") - self.compensate_tf = rospy.get_param("~compensate_tf", False) - - if self.compensate_tf: - self.listener = tf.TransformListener() - - self.pub = rospy.Publisher(f"~{self.gridmap_pub_topic}", GridMap, queue_size=10) - rospy.Subscriber(f"~{self.gridmap_sub_topic}", GridMap, self.callback, queue_size=1) - - def callback(self, msg): - delta_x = msg.info.length_x * self.offset_factor_x - delta_y = msg.info.length_y * self.offset_factor_y - - if self.compensate_tf: - try: - (trans, rot) = self.listener.lookupTransform(msg.info.header.frame_id, "base", msg.info.header.stamp) - except (tf.LookupException, tf.ConnectivityException, tf.ExtrapolationException): - return - yaw = euler_from_quaternion(rot)[2] - else: - yaw = 0 - - msg.info.pose.position.x += delta_x * math.cos(yaw) - delta_y * math.sin(yaw) - msg.info.pose.position.y += delta_x * math.sin(yaw) + delta_y * math.cos(yaw) - self.pub.publish(msg) - - -if __name__ == "__main__": - print("Starting Shift GridMap Node!") - # rosrun wild_visual_navigation_ros shift_gridmap.py 0 _gridmap_sub_topic:=/elevation_mapping/elevation_map_wifi _gridmap_pub_topic:=/left _offset_factor_x:=10 _offset_factor_y:=0 - - try: - nr = "_" + rospy.myargv(argv=sys.argv)[-1].split(" ")[-1] - rospy.init_node(f"wild_visual_navigation_gridmap_{nr}") - except: - rospy.init_node("wild_visual_navigation_gridmap") - - wvn = ShiftGridMapNode() - rospy.spin() - - print("Finished 
Shift GridMap Node!") diff --git a/wild_visual_navigation_ros/scripts/smart_carrot.py b/wild_visual_navigation_ros/scripts/smart_carrot.py index f2773d58..e46e0d63 100644 --- a/wild_visual_navigation_ros/scripts/smart_carrot.py +++ b/wild_visual_navigation_ros/scripts/smart_carrot.py @@ -1,117 +1,167 @@ -from grid_map_msgs.msg import GridMap, GridMapInfo -import rospy import sys import numpy as np -import tf2_ros from scipy.spatial.transform import Rotation as R import cv2 import math -from geometry_msgs.msg import PoseWithCovarianceStamped import time +import rospy +from grid_map_msgs.msg import GridMap, GridMapInfo +from geometry_msgs.msg import PoseWithCovarianceStamped +from dynamic_reconfigure.server import Server +import tf2_ros + class SmartCarrotNode: def __init__(self): - self.gridmap_sub_topic = "/elevation_mapping/elevation_map_wifi" # rospy.get_param("~gridmap_sub_topic", "/elevation_mapping/elevation_map_wifi") - self.pub = rospy.Publisher(f"~binary_mask", GridMap, queue_size=5) - self.pub_goal = rospy.Publisher(f"/initialpose", PoseWithCovarianceStamped, queue_size=5) - self.tf_buffer = tf2_ros.Buffer(cache_time=rospy.Duration(3.0)) + # ROS topics + self.gridmap_sub_topic = rospy.get_param("~gridmap_sub_topic") + self.debug_pub_topic = rospy.get_param("~debug_pub_topic") + self.goal_pub_topic = rospy.get_param("~goal_pub_topic") + self.map_frame = rospy.get_param("~map_frame") + self.base_frame = rospy.get_param("~base_frame") + # Operation Mode + self.debug = rospy.get_param("~debug") + + # Parameters + self.distance_force_factor = rospy.get_param("~distance_force_factor") + self.center_force_factor = rospy.get_param("~center_force_factor") + + # TODO this could be implemented + self.filter_chain_funcs = [] + if self.distance_force_factor > 0: + self.filter_chain_funcs.append(self.apply_distance_force) + self.distance_force = None + + if self.center_force_factor > 0: + self.filter_chain_funcs.append(self.apply_center_force) + + def distance_to_line(array, x_cor, y_cor, yaw, start_x, start_y): + return np.abs(np.cos(yaw) * (x_cor - start_x) - np.sin(yaw) * (y_cor - start_y)) + + self.vdistance_to_line = np.vectorize(distance_to_line) + + # Initialize ROS publishers + self.pub = rospy.Publisher(f"~{self.debug_pub_topic}", GridMap, queue_size=5) + self.pub_goal = rospy.Publisher(self.goal_pub_topic, PoseWithCovarianceStamped, queue_size=5) + + # Initialize TF listener + self.tf_buffer = tf2_ros.Buffer(cache_time=rospy.Duration(10.0)) self.tf_listener = tf2_ros.TransformListener(self.tf_buffer) + self.sub = rospy.Subscriber(self.gridmap_sub_topic, GridMap, self.callback, queue_size=5) + + def apply_distance_force(self, yaw, sdf): + if self.distance_force is None: + # Create the distance force + self.distance_force = np.zeros((sdf.shape[0], sdf.shape[1])) + for x in range(sdf.shape[0]): + for y in range(sdf.shape[1]): + self.distance_force[x, y] = math.sqrt( + (x - int(sdf.shape[0] / 2)) ** 2 + (y - int(sdf.shape[1] / 2)) ** 2 + ) + self.distance_force /= self.distance_force.max() + self.distance_force *= self.distance_force_factor + return sdf + self.distance_force + + def apply_center_force(self, yaw, sdf): + xv, yv = np.meshgrid(np.arange(0, sdf.shape[0]), np.arange(0, sdf.shape[1])) + center_force = self.vdistance_to_line(sdf, xv, yv, yaw, int(sdf.shape[0] / 2), int(sdf.shape[1] / 2)) + return sdf - center_force * self.center_force_factor + + def get_pattern_mask(self, H, W, yaw): + # Defines a pattern based on the yaw of the robot where we search for a minimum within the SDF 
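+ # The three circles below are placed 30, 55 and 90 cells ahead of the map center along the robot heading (sin(yaw), cos(yaw)) and are drawn as filled discs via the thick stroke (radius 0, thickness 30/40/50).
+ # The returned mask is True for every cell outside this pattern; the callback sets those SDF cells to -inf, so the goal search only considers cells ahead of the robot.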
+ binary_mask = np.zeros((H, W), dtype=np.uint8) + distance = 30 + center_x = int(H / 2 + math.sin(yaw) * distance) + center_y = int(W / 2 + math.cos(yaw) * distance) + binary_mask = cv2.circle(binary_mask, (center_x, center_y), 0, 255, 30) + distance = 55 + center_x = int(H / 2 + math.sin(yaw) * distance) + center_y = int(W / 2 + math.cos(yaw) * distance) + binary_mask = cv2.circle(binary_mask, (center_x, center_y), 0, 255, 40) + distance = 90 + center_x = int(H / 2 + math.sin(yaw) * distance) + center_y = int(W / 2 + math.cos(yaw) * distance) + binary_mask = cv2.circle(binary_mask, (center_x, center_y), 0, 255, 50) - self.sub = rospy.Subscriber(f"~{self.gridmap_sub_topic}", GridMap, self.callback, queue_size=10) - - self.offset = np.zeros((200, 200)) - for x in range(200): - for y in range(200): - self.offset[x, y] = math.sqrt((x - 100) ** 2 + (y - 100) ** 2) + return binary_mask == 0 - self.offset /= self.offset.max() - self.offset *= 0.5 - self.debug = True + def get_elevation_mask(self, elevation_layer): + invalid_elevation = np.isnan(elevation_layer) + # Increase the size of the invalid elevation to reduce noise + kernel = np.ones((3, 3), np.uint8) + invalid_elevation = cv2.dilate(np.uint8(invalid_elevation) * 255, kernel, iterations=1) == 255 + return invalid_elevation def callback(self, msg): - st = time.time() - target_layer = "sdf" - if target_layer in msg.layers: - # extract grid_map layer as numpy array - data_list = msg.data[msg.layers.index(target_layer)].data - layout_info = msg.data[msg.layers.index(target_layer)].layout - n_cols = layout_info.dim[0].size - n_rows = layout_info.dim[1].size - sdf = np.reshape(np.array(data_list), (n_rows, n_cols)) - sdf = sdf[::-1, ::-1].transpose().astype(np.float32) - - target_layer = "elevation" - if target_layer in msg.layers: - # extract grid_map layer as numpy array - data_list = msg.data[msg.layers.index(target_layer)].data - layout_info = msg.data[msg.layers.index(target_layer)].layout - n_cols = layout_info.dim[0].size - n_rows = layout_info.dim[1].size - elevation = np.reshape(np.array(data_list), (n_rows, n_cols)) - elevation = elevation[::-1, ::-1].transpose().astype(np.float32) + print("called callback") + # Convert GridMap to numpy array + layers = {} + for layer_name in ["sdf", "elevation"]: + if layer_name in msg.layers: + data_list = msg.data[msg.layers.index(layer_name)].data + layout_info = msg.data[msg.layers.index(layer_name)].layout + n_cols = layout_info.dim[0].size + n_rows = layout_info.dim[1].size + layer = np.reshape(np.array(data_list), (n_rows, n_cols)) + layer = layer[::-1, ::-1].transpose().astype(np.float32) + layers[layer_name] = layer + else: + rospy.logwarn(f"Layer {layer_name} not found in GridMap") + return False try: res = self.tf_buffer.lookup_transform( - "odom", "base_inverted_field_local_planner", msg.info.header.stamp, timeout=rospy.Duration(0.01) + self.map_frame, self.base_frame, msg.info.header.stamp, timeout=rospy.Duration(0.01) ) except Exception as e: - print("error") - print("Error in query tf: ", e) - rospy.logwarn(f"Couldn't get between odom and base") - return + error = str(e) + rospy.logwarn(f"Couldn't get between odom and base {error}") + return False - yaw = R.from_quat( - [res.transform.rotation.x, res.transform.rotation.y, res.transform.rotation.z, res.transform.rotation.w] - ).as_euler("zxy", degrees=False)[0] + H, W = layers["sdf"].shape + rot = res.transform.rotation + yaw = R.from_quat([rot.x, rot.y, rot.z, rot.w]).as_euler("zxy", degrees=False)[0] - binary_mask = 
np.zeros((sdf.shape[0], sdf.shape[1]), dtype=np.uint8) + mask_pattern = self.get_pattern_mask(H, W, yaw) + mask_elevation = self.get_elevation_mask(layers["elevation"]) - distance = 30 # sdf.shape[0] / 5 - center_x = int(sdf.shape[0] / 2 + math.sin(yaw) * distance) - center_y = int(sdf.shape[1] / 2 + math.cos(yaw) * distance) - binary_mask = cv2.circle(binary_mask, (center_x, center_y), 0, 255, 30) - distance = 55 # sdf.shape[0] / 3 - center_x = int(sdf.shape[0] / 2 + math.sin(yaw) * distance) - center_y = int(sdf.shape[1] / 2 + math.cos(yaw) * distance) - binary_mask = cv2.circle(binary_mask, (center_x, center_y), 0, 255, 40) - distance = 90 # sdf.shape[0] / 2 - center_x = int(sdf.shape[0] / 2 + math.sin(yaw) * distance) - center_y = int(sdf.shape[1] / 2 + math.cos(yaw) * distance) - binary_mask = cv2.circle(binary_mask, (center_x, center_y), 0, 255, 50) - m = binary_mask == 0 - sdf += self.offset - m2 = np.isnan(elevation) + for filter_func in self.filter_chain_funcs: + layers["sdf"] = filter_func(yaw, layers["sdf"]) - kernel = np.ones((3, 3), np.uint8) - m2 = cv2.dilate(np.uint8(m2) * 255, kernel, iterations=1) == 255 - sdf[m] = sdf.min() - sdf[m2] = sdf.min() + layers["sdf"][mask_pattern] = -np.inf + layers["sdf"][mask_elevation] = -np.inf - if sdf.min() == sdf.max(): + if layers["sdf"].min() == layers["sdf"].max(): + rospy.logwarn("No valid elevation within the SDF of the defined pattern") return - x, y = np.where(sdf == sdf.max()) + # Get the index of the gridmap cell with the maximum SDF value + x, y = np.where(layers["sdf"] == layers["sdf"].max()) x = x[0] y = y[0] - x -= sdf.shape[0] / 2 - y -= sdf.shape[1] / 2 + + # Convert the GridMap index to a map frame position + x -= H / 2 + y -= W / 2 x *= msg.info.resolution y *= msg.info.resolution x += msg.info.pose.position.x y += msg.info.pose.position.y + # Publish the goal goal = PoseWithCovarianceStamped() goal.header.stamp = msg.info.header.stamp - goal.header.frame_id = "odom" + goal.header.frame_id = self.map_frame goal.pose.pose.position.x = x goal.pose.pose.position.y = y goal.pose.pose.position.z = res.transform.translation.z + 0.3 - goal.pose.pose.orientation = res.transform.rotation + goal.pose.pose.orientation = rot self.pub_goal.publish(goal) + if self.debug: - target_layer = "sdf" - msg.data[msg.layers.index(target_layer)].data = sdf[::-1, ::-1].transpose().ravel() + # Republish the SDF that was used to search for the goal + msg.data[msg.layers.index("sdf")].data = layers["sdf"][::-1, ::-1].transpose().ravel() + self.pub.publish(msg) diff --git a/wild_visual_navigation_ros/scripts/wild_visual_navigation_node.py b/wild_visual_navigation_ros/scripts/wild_visual_navigation_node.py deleted file mode 100644 index 3212b3e2..00000000 --- a/wild_visual_navigation_ros/scripts/wild_visual_navigation_node.py +++ /dev/null @@ -1,978 +0,0 @@ -from wild_visual_navigation import WVN_ROOT_DIR -from wild_visual_navigation.image_projector import ImageProjector -from wild_visual_navigation.supervision_generator import SupervisionGenerator -from wild_visual_navigation.traversability_estimator import TraversabilityEstimator -from wild_visual_navigation.traversability_estimator import MissionNode, ProprioceptionNode -import wild_visual_navigation_ros.ros_converter as rc -from wild_visual_navigation.utils import accumulate_time -from wild_visual_navigation_msgs.msg import RobotState, SystemState -from wild_visual_navigation_msgs.srv import ( - LoadCheckpoint, - SaveCheckpoint, - LoadCheckpointResponse, - SaveCheckpointResponse, -) -from 
std_srvs.srv import SetBool, Trigger, TriggerResponse -from wild_visual_navigation.utils import SystemLevelTimer, SystemLevelContextTimer -from wild_visual_navigation.utils import SystemLevelGpuMonitor, SystemLevelContextGpuMonitor, accumulate_memory -from wild_visual_navigation.utils import WVNMode -from wild_visual_navigation.cfg import ExperimentParams -from wild_visual_navigation.utils import override_params -from wild_visual_navigation.learning.utils import load_yaml, load_env, create_experiment_folder -from geometry_msgs.msg import PoseStamped, Point, TwistStamped -from nav_msgs.msg import Path -from sensor_msgs.msg import Image, CameraInfo, CompressedImage -from std_msgs.msg import ColorRGBA, Float32, Float32MultiArray -from threading import Thread, Event -from visualization_msgs.msg import Marker -import message_filters -import os -import rospy -import seaborn as sns -import tf -import torch -import numpy as np -from typing import Optional -import traceback -import signal -import sys -import tf2_ros - -if torch.cuda.is_available(): - torch.cuda.empty_cache() - - -class WvnRosInterface: - def __init__(self): - # Timers to control the rate of the publishers - self.last_image_ts = rospy.get_time() - self.last_proprio_ts = rospy.get_time() - - # Read params - self.read_params() - - # Visualization - self.color_palette = sns.color_palette(self.colormap, as_cmap=True) - - # Setup Mission Folder - create_experiment_folder(self.params, load_env()) - - # Initialize traversability estimator - self.traversability_estimator = TraversabilityEstimator( - params=self.params, - device=self.device, - image_size=self.network_input_image_height, # Note: we assume height == width - segmentation_type=self.segmentation_type, - feature_type=self.feature_type, - max_distance=self.traversability_radius, - image_distance_thr=self.image_graph_dist_thr, - proprio_distance_thr=self.proprio_graph_dist_thr, - optical_flow_estimator_type=self.optical_flow_estimator_type, - min_samples_for_training=self.min_samples_for_training, - vis_node_index=self.vis_node_index, - mode=self.mode, - extraction_store_folder=self.extraction_store_folder, - patch_size=self.dino_patch_size, - scale_traversability=self.scale_traversability, - ) - - # Initialize traversability generator to process velocity commands - self.supervision_generator = SupervisionGenerator( - self.device, - kf_process_cov=0.1, - kf_meas_cov=10, - kf_outlier_rejection="huber", - kf_outlier_rejection_delta=0.5, - sigmoid_slope=20, - sigmoid_cutoff=0.25, # 0.2 - untraversable_thr=self.untraversable_thr, # 0.1 - time_horizon=0.05, - ) - # Initialize camera handler for subscription/publishing - self.camera_handler = {} - self.system_events = {} - # Setup ros - self.setup_ros(setup_fully=self.mode != WVNMode.EXTRACT_LABELS) - - # Setup Timer if needed - self.timer = SystemLevelTimer( - objects=[ - self, - self.traversability_estimator, - self.traversability_estimator._visualizer, - self.supervision_generator, - ], - names=["WVN", "TraversabilityEstimator", "Visualizer", "SupervisionGenerator"], - enabled=(self.print_image_callback_time or self.print_proprio_callback_time or self.log_time), - ) - - self.gpu_monitor = SystemLevelGpuMonitor( - objects=[ - self, - self.traversability_estimator, - self.traversability_estimator._visualizer, - self.supervision_generator, - ], - names=["WVN", "TraversabilityEstimator", "Visualizer", "SupervisionGenerator"], - enabled=self.log_memory, - device=self.device, - store_samples=True, - skip_n_samples=1, - ) - # Register 
shotdown callbacks - rospy.on_shutdown(self.shutdown_callback) - signal.signal(signal.SIGINT, self.shutdown_callback) - signal.signal(signal.SIGTERM, self.shutdown_callback) - - # Launch processes - print("─" * 80) - print("Launching [learning] thread") - if self.mode != WVNMode.EXTRACT_LABELS: - self.learning_thread_stop_event = Event() - self.learning_thread = Thread(target=self.learning_thread_loop, name="learning") - self.learning_thread.start() - - self.logging_thread_stop_event = Event() - self.logging_thread = Thread(target=self.logging_thread_loop, name="logging") - self.logging_thread.start() - print("[WVN] System ready") - - def shutdown_callback(self, *args, **kwargs): - # Write stuff to files - rospy.logwarn("Shutdown callback called") - if self.mode != WVNMode.EXTRACT_LABELS: - self.learning_thread_stop_event.set() - self.logging_thread_stop_event.set() - - print("Storing learned checkpoint...", end="") - self.traversability_estimator.save_checkpoint(self.params.general.model_path, "last_checkpoint.pt") - print("done") - - if self.log_time: - print("Storing timer data...", end="") - self.timer.store(folder=self.params.general.model_path) - print("done") - if self.log_memory: - print("Storing memory data...", end="") - self.gpu_monitor.store(folder=self.params.general.model_path) - print("done") - - print("Joining learning thread...", end="") - if self.mode != WVNMode.EXTRACT_LABELS: - self.learning_thread_stop_event.set() - self.learning_thread.join() - - self.logging_thread_stop_event.set() - self.logging_thread.join() - print("done") - - rospy.signal_shutdown(f"Wild Visual Navigation killed {args}") - sys.exit(0) - - @accumulate_time - def learning_thread_loop(self): - """This implements the main thread that runs the training procedure - We can only set the rate using rosparam - """ - # Set rate - rate = rospy.Rate(self.learning_thread_rate) - - # Learning loop - while True: - self.system_events["learning_thread_loop"] = {"time": rospy.get_time(), "value": "running"} - self.learning_thread_stop_event.wait(timeout=0.01) - if self.learning_thread_stop_event.is_set(): - rospy.logwarn("Stopped learning thread") - break - - # Optimize model - # with SystemLevelContextGpuMonitor(self, "training_step_time"): - with SystemLevelContextTimer(self, "training_step_time"): - res = self.traversability_estimator.train() - - if self.step != self.traversability_estimator.step: - self.step_time = rospy.get_time() - self.step = self.traversability_estimator.step - self.gpu_monitor.update(self.step, self.step_time) - - # Publish loss - system_state = SystemState() - for k in res.keys(): - if hasattr(system_state, k): - setattr(system_state, k, res[k]) - - system_state.pause_learning = self.traversability_estimator.pause_learning - system_state.mode = self.mode.value - system_state.step = self.step - self.pub_system_state.publish(system_state) - - rate.sleep() - - self.system_events["learning_thread_loop"] = {"time": rospy.get_time(), "value": "finished"} - self.learning_thread_stop_event.clear() - - def logging_thread_loop(self): - rate = rospy.Rate(self.logging_thread_rate) - # Learning loop - while True: - self.learning_thread_stop_event.wait(timeout=0.01) - if self.learning_thread_stop_event.is_set(): - rospy.logwarn("Stopped logging thread") - break - - current_time = rospy.get_time() - tmp = self.system_events.copy() - rospy.loginfo("System Events:") - for k, v in tmp.items(): - value = v["value"] - msg = ( - (k + ": ").ljust(35, " ") - + (str(round(current_time - v["time"], 4)) + "s 
").ljust(10, " ") - + f" {value}" - ) - rospy.loginfo(msg) - rate.sleep() - rospy.loginfo("--------------") - self.learning_thread_stop_event.clear() - - @accumulate_time - def read_params(self): - """Reads all the parameters from the parameter server""" - # Topics - self.robot_state_topic = rospy.get_param("~robot_state_topic") - self.desired_twist_topic = rospy.get_param("~desired_twist_topic") - self.camera_topics = rospy.get_param("~camera_topics") - - # Frames - self.fixed_frame = rospy.get_param("~fixed_frame") - self.base_frame = rospy.get_param("~base_frame") - self.footprint_frame = rospy.get_param("~footprint_frame") - - # Robot size and specs - self.robot_length = rospy.get_param("~robot_length") - self.robot_width = rospy.get_param("~robot_width") - self.robot_height = rospy.get_param("~robot_height") - - # Traversability estimation params - self.traversability_radius = rospy.get_param("~traversability_radius") - self.image_graph_dist_thr = rospy.get_param("~image_graph_dist_thr") - self.proprio_graph_dist_thr = rospy.get_param("~proprio_graph_dist_thr") - self.network_input_image_height = rospy.get_param("~network_input_image_height") - self.network_input_image_width = rospy.get_param("~network_input_image_width") - self.segmentation_type = rospy.get_param("~segmentation_type") - self.feature_type = rospy.get_param("~feature_type") - self.dino_patch_size = rospy.get_param("~dino_patch_size") - self.confidence_std_factor = rospy.get_param("~confidence_std_factor") - self.scale_traversability = rospy.get_param("~scale_traversability") - self.scale_traversability_max_fpr = rospy.get_param("~scale_traversability_max_fpr") - self.min_samples_for_training = rospy.get_param("~min_samples_for_training") - self.vis_node_index = rospy.get_param("~debug_supervision_node_index_from_last") - - # Supervision Generator - self.robot_max_velocity = rospy.get_param("~robot_max_velocity") - self.untraversable_thr = rospy.get_param("~untraversable_thr") - - # Optical flow params - self.optical_flow_estimator_type = rospy.get_param("~optical_flow_estimator_type") - - # Threads - self.image_callback_rate = rospy.get_param("~image_callback_rate") # hertz - self.proprio_callback_rate = rospy.get_param("~proprio_callback_rate") # hertz - self.learning_thread_rate = rospy.get_param("~learning_thread_rate") # hertz - self.logging_thread_rate = rospy.get_param("~logging_thread_rate") # hertz - - # Data storage - self.mission_name = rospy.get_param("~mission_name") - self.mission_timestamp = rospy.get_param("~mission_timestamp") - - # Print timings - self.print_image_callback_time = rospy.get_param("~print_image_callback_time") - self.print_proprio_callback_time = rospy.get_param("~print_proprio_callback_time") - self.log_time = rospy.get_param("~log_time") - self.log_memory = rospy.get_param("~log_memory") - self.log_confidence = rospy.get_param("~log_confidence") - self.verbose = rospy.get_param("~verbose") - - # Select mode: # debug, online, extract_labels - self.use_debug_for_desired = rospy.get_param("~use_debug_for_desired") # Note: Unused parameter - self.mode = WVNMode.from_string(rospy.get_param("~mode", "debug")) - self.extraction_store_folder = rospy.get_param("~extraction_store_folder") - - # Parse operation modes - if self.mode == WVNMode.ONLINE: - print("\nWARNING: online_mode enabled. 
The graph will not store any debug/training data such as images\n") - - elif self.mode == WVNMode.EXTRACT_LABELS: - self.image_callback_rate = 3 - self.proprio_callback_rate = 4 - self.optical_flow_estimator_type = False - self.image_graph_dist_thr = 0.2 - self.proprio_graph_dist_thr = 0.1 - - os.makedirs(os.path.join(self.extraction_store_folder, "image"), exist_ok=True) - os.makedirs(os.path.join(self.extraction_store_folder, "supervision_mask"), exist_ok=True) - - # Experiment file - exp_file = rospy.get_param("~exp") - - # Torch device - self.device = rospy.get_param("~device") - - # Visualization - self.colormap = rospy.get_param("~colormap") - - # Initialize traversability estimator parameters - self.params = ExperimentParams() - if exp_file != "nan": - exp_override = load_yaml(os.path.join(WVN_ROOT_DIR, "cfg/exp", exp_file)) - self.params = override_params(self.params, exp_override) - - self.params.general.name = self.mission_name - self.params.general.timestamp = self.mission_timestamp - self.params.general.log_confidence = self.log_confidence - self.params.loss.confidence_std_factor = self.confidence_std_factor - self.params.loss.w_temp = 0 - self.step = -1 - self.step_time = rospy.get_time() - - assert self.optical_flow_estimator_type == "none", "Optical flow estimator not tested due to changes" - - def setup_rosbag_replay(self, tf_listener): - self.tf_listener = tf_listener - - def setup_ros(self, setup_fully=True): - """Main function to setup ROS-related stuff: publishers, subscribers and services""" - if setup_fully: - # Initialize TF listener - self.tf_buffer = tf2_ros.Buffer(cache_time=rospy.Duration(20.0)) - self.tf_listener = tf2_ros.TransformListener(self.tf_buffer) - - # Robot state callback - robot_state_sub = message_filters.Subscriber(self.robot_state_topic, RobotState) - cache1 = message_filters.Cache(robot_state_sub, 10) - desired_twist_sub = message_filters.Subscriber(self.desired_twist_topic, TwistStamped) - cache2 = message_filters.Cache(desired_twist_sub, 10) - - self.robot_state_sub = message_filters.ApproximateTimeSynchronizer( - [robot_state_sub, desired_twist_sub], queue_size=10, slop=0.5 - ) - - print("Start waiting for RobotState topic being published!") - rospy.wait_for_message(self.robot_state_topic, RobotState) - print("Start waiting for TwistStamped topic being published!") - rospy.wait_for_message(self.desired_twist_topic, TwistStamped) - - self.robot_state_sub.registerCallback(self.robot_state_callback) - - # Image callback - for cam in self.camera_topics: - # Initialize camera handler for given cam - self.camera_handler[cam] = {} - # Store camera name - self.camera_topics[cam]["name"] = cam - # Set subscribers - base_topic = self.camera_topics[cam]["image_topic"].replace("/compressed", "") - is_compressed = self.camera_topics[cam]["image_topic"] != base_topic - if is_compressed: - image_sub = message_filters.Subscriber(self.camera_topics[cam]["image_topic"], CompressedImage) - else: - image_sub = message_filters.Subscriber(self.camera_topics[cam]["image_topic"], Image) - - info_sub = message_filters.Subscriber(self.camera_topics[cam]["info_topic"], CameraInfo) - sync = message_filters.ApproximateTimeSynchronizer([image_sub, info_sub], queue_size=2, slop=0.5) - sync.registerCallback(self.image_callback, self.camera_topics[cam]) - self.camera_handler[cam]["image_sub"] = image_sub - self.camera_handler[cam]["info_sub"] = info_sub - self.camera_handler[cam]["sync"] = sync - - # Set publishers - input_pub = 
rospy.Publisher(f"/wild_visual_navigation_node/{cam}/image_input", Image, queue_size=10) - trav_pub = rospy.Publisher(f"/wild_visual_navigation_node/{cam}/traversability", Image, queue_size=10) - conf_pub = rospy.Publisher(f"/wild_visual_navigation_node/{cam}/confidence", Image, queue_size=10) - info_pub = rospy.Publisher(f"/wild_visual_navigation_node/{cam}/camera_info", CameraInfo, queue_size=10) - self.camera_handler[cam]["input_pub"] = input_pub - self.camera_handler[cam]["trav_pub"] = trav_pub - self.camera_handler[cam]["conf_pub"] = conf_pub - self.camera_handler[cam]["info_pub"] = info_pub - - if self.mode == WVNMode.DEBUG: - # Debugging publishers - last_image_labeled_pub = rospy.Publisher( - f"/wild_visual_navigation_node/{cam}/debug/last_node_image_labeled", Image, queue_size=10 - ) - last_image_mask_pub = rospy.Publisher( - f"/wild_visual_navigation_node/{cam}/debug/last_node_image_mask", Image, queue_size=10 - ) - last_image_trav_pub = rospy.Publisher( - f"/wild_visual_navigation_node/{cam}/debug/last_image_traversability", Image, queue_size=10 - ) - last_image_conf_pub = rospy.Publisher( - f"/wild_visual_navigation_node/{cam}/debug/last_image_confidence", Image, queue_size=10 - ) - self.camera_handler[cam]["debug"] = {} - self.camera_handler[cam]["debug"]["image_labeled"] = last_image_labeled_pub - self.camera_handler[cam]["debug"]["image_mask"] = last_image_mask_pub - self.camera_handler[cam]["debug"]["image_trav"] = last_image_trav_pub - self.camera_handler[cam]["debug"]["image_conf"] = last_image_conf_pub - - # 3D outputs - self.pub_debug_proprio_graph = rospy.Publisher( - "/wild_visual_navigation_node/proprioceptive_graph", Path, queue_size=10 - ) - self.pub_mission_graph = rospy.Publisher("/wild_visual_navigation_node/mission_graph", Path, queue_size=10) - self.pub_graph_footprints = rospy.Publisher( - "/wild_visual_navigation_node/graph_footprints", Marker, queue_size=10 - ) - # 1D outputs - self.pub_instant_traversability = rospy.Publisher( - "/wild_visual_navigation_node/instant_traversability", Float32, queue_size=10 - ) - self.pub_system_state = rospy.Publisher("/wild_visual_navigation_node/system_state", SystemState, queue_size=10) - - # Services - # Like, reset graph or the like - self.save_checkpt_service = rospy.Service("~save_checkpoint", SaveCheckpoint, self.save_checkpoint_callback) - self.load_checkpt_service = rospy.Service("~load_checkpoint", LoadCheckpoint, self.load_checkpoint_callback) - - self.pause_learning_service = rospy.Service("~pause_learning", SetBool, self.pause_learning_callback) - self.reset_service = rospy.Service("~reset", Trigger, self.reset_callback) - - def pause_learning_callback(self, req): - """Start and stop the network training""" - prev_state = self.traversability_estimator.pause_learning - self.traversability_estimator.pause_learning = req.data - if not req.data and prev_state: - message = "Resume training!" - elif req.data and prev_state: - message = "Training was already paused!" - elif not req.data and not prev_state: - message = "Training was already running!" - elif req.data and not prev_state: - message = "Pause training!" 
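# The ~pause_learning service registered above follows the std_srvs/SetBool
# convention: data=True pauses the learning thread, data=False resumes it.
# A minimal client sketch (the resolved service name assumes the node is
# started as "wild_visual_navigation_node", as in the __main__ block of this
# file; adapt it to your launch setup):
import rospy
from std_srvs.srv import SetBool

rospy.init_node("pause_learning_client", anonymous=True)
rospy.wait_for_service("/wild_visual_navigation_node/pause_learning")
pause_learning = rospy.ServiceProxy("/wild_visual_navigation_node/pause_learning", SetBool)
response = pause_learning(True)  # True pauses training, False resumes it
print(response.success, response.message)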
- message += f" Updated the network for {self.traversability_estimator.step} steps" - - return True, message - - def reset_callback(self, req): - """Resets the system""" - print("WARNING: System reset!") - - print("Storing learned checkpoint...", end="") - self.traversability_estimator.save_checkpoint(self.params.general.model_path, "last_checkpoint.pt") - print("done") - - if self.log_time: - print("Storing timer data...", end="") - self.timer.store(folder=self.params.general.model_path) - print("done") - if self.log_memory: - print("Storing memory data...", end="") - self.gpu_monitor.store(folder=self.params.general.model_path) - print("done") - - # Create new mission folder - create_experiment_folder(self.params, load_env()) - - # Reset traversability estimator - self.traversability_estimator.reset() - - print("Reset done") - return TriggerResponse(True, "Reset done!") - - @accumulate_time - def save_checkpoint_callback(self, req): - """Service call to store the learned checkpoint - - Args: - req (TriggerRequest): Trigger request service - """ - if req.checkpoint_name == "": - req.checkpoint_name = "last_checkpoint.pt" - - if req.mission_path == "": - message = f"[WARNING] Store checkpoint {req.checkpoint_name} default mission path: {self.params.general.model_path}/{req.checkpoint_name}" - req.mission_path = self.params.general.model_path - else: - message = f"Store checkpoint {req.checkpoint_name} to: {req.mission_path}/{req.checkpoint_name}" - - self.traversability_estimator.save_checkpoint(req.mission_path, req.checkpoint_name) - return SaveCheckpointResponse(success=True, message=message) - - def load_checkpoint_callback(self, req): - """Service call to load a learned checkpoint - - Args: - req (TriggerRequest): Trigger request service - """ - if req.checkpoint_path == "": - return LoadCheckpointResponse( - success=False, - message=f"Path [{req.checkpoint_path}] is empty. 
Please check and try again", - ) - checkpoint_path = req.checkpoint_path - self.traversability_estimator.load_checkpoint(checkpoint_path) - return LoadCheckpointResponse(success=True, message=f"Checkpoint [{checkpoint_path}] loaded successfully") - - @accumulate_time - def query_tf(self, parent_frame: str, child_frame: str, stamp: Optional[rospy.Time] = None): - """Helper function to query TFs - - Args: - parent_frame (str): Frame of the parent TF - child_frame (str): Frame of the child - """ - - if stamp is None: - stamp = rospy.Time(0) - - try: - res = self.tf_buffer.lookup_transform(parent_frame, child_frame, stamp, timeout=rospy.Duration(0.03)) - trans = (res.transform.translation.x, res.transform.translation.y, res.transform.translation.z) - rot = np.array( - [res.transform.rotation.x, res.transform.rotation.y, res.transform.rotation.z, res.transform.rotation.w] - ) - rot /= np.linalg.norm(rot) - return (trans, tuple(rot)) - except Exception as e: - print("Error in query tf: ", e) - rospy.logwarn(f"Couldn't get between {parent_frame} and {child_frame}") - return (None, None) - - @accumulate_memory - @accumulate_time - def robot_state_callback(self, state_msg, desired_twist_msg: TwistStamped): - """Main callback to process proprioceptive info (robot state) - - Args: - state_msg (wild_visual_navigation_msgs/RobotState): Robot state message - desired_twist_msg (geometry_msgs/TwistStamped): Desired twist message - """ - self.system_events["robot_state_callback_received"] = {"time": rospy.get_time(), "value": "message received"} - try: - ts = state_msg.header.stamp.to_sec() - if abs(ts - self.last_proprio_ts) < 1.0 / self.proprio_callback_rate: - self.system_events["robot_state_callback_cancled"] = { - "time": rospy.get_time(), - "value": "cancled due to rate", - } - return - self.last_propio_ts = ts - - # Query transforms from TF - suc, pose_base_in_world = rc.ros_tf_to_torch( - self.query_tf(self.fixed_frame, self.base_frame, state_msg.header.stamp), device=self.device - ) - if not suc: - self.system_events["robot_state_callback_cancled"] = { - "time": rospy.get_time(), - "value": "cancled due to pose_base_in_world", - } - return - - suc, pose_footprint_in_base = rc.ros_tf_to_torch( - self.query_tf(self.base_frame, self.footprint_frame, state_msg.header.stamp), device=self.device - ) - if not suc: - self.system_events["robot_state_callback_cancled"] = { - "time": rospy.get_time(), - "value": "cancled due to pose_footprint_in_base", - } - return - - # The footprint requires a correction: we use the same orientation as the base - pose_footprint_in_base[:3, :3] = torch.eye(3, device=self.device) - - # Convert state to tensor - proprio_tensor, proprio_labels = rc.wvn_robot_state_to_torch(state_msg, device=self.device) - current_twist_tensor = rc.twist_stamped_to_torch(state_msg.twist, device=self.device) - desired_twist_tensor = rc.twist_stamped_to_torch(desired_twist_msg, device=self.device) - - # Update traversability - traversability, traversability_var, is_untraversable = self.supervision_generator.update_velocity_tracking( - current_twist_tensor, desired_twist_tensor, velocities=["vx", "vy"] - ) - - # Create proprioceptive node for the graph - proprio_node = ProprioceptionNode( - timestamp=ts, - pose_base_in_world=pose_base_in_world, - pose_footprint_in_base=pose_footprint_in_base, - twist_in_base=current_twist_tensor, - desired_twist_in_base=desired_twist_tensor, - width=self.robot_width, - length=self.robot_length, - height=self.robot_width, - proprioception=proprio_tensor, - 
traversability=traversability, - traversability_var=traversability_var, - is_untraversable=is_untraversable, - ) - - # Add node to the graph - self.traversability_estimator.add_proprio_node(proprio_node) - - if self.mode == WVNMode.DEBUG or self.mode == WVNMode.ONLINE: - self.visualize_proprioception() - - if self.print_proprio_callback_time: - print(self.timer) - - self.system_events["robot_state_callback_state"] = { - "time": rospy.get_time(), - "value": "executed successfully", - } - - except Exception as e: - traceback.print_exc() - print("error state callback", e) - self.system_events["robot_state_callback_state"] = { - "time": rospy.get_time(), - "value": f"failed to execute {e}", - } - - raise Exception("Error in robot state callback") - - @accumulate_memory - @accumulate_time - def image_callback(self, image_msg: Image, info_msg: CameraInfo, camera_options: dict): - """Main callback to process incoming images - - Args: - image_msg (sensor_msgs/Image): Incoming image - info_msg (sensor_msgs/CameraInfo): Camera info message associated to the image - """ - self.system_events["image_callback_received"] = {"time": rospy.get_time(), "value": "message received"} - if self.verbose: - print(f"\nImage callback: {camera_options['name']}... ", end="") - try: - - # Run the callback so as to match the desired rate - ts = image_msg.header.stamp.to_sec() - if abs(ts - self.last_image_ts) < 1.0 / self.image_callback_rate: - if self.verbose: - print("skip") - return - else: - if self.verbose: - print("process") - self.last_image_ts = ts - - # Query transforms from TF - suc, pose_base_in_world = rc.ros_tf_to_torch( - self.query_tf(self.fixed_frame, self.base_frame, image_msg.header.stamp), device=self.device - ) - if not suc: - self.system_events["image_callback_cancled"] = { - "time": rospy.get_time(), - "value": "cancled due to pose_base_in_world", - } - return - suc, pose_cam_in_base = rc.ros_tf_to_torch( - self.query_tf(self.base_frame, image_msg.header.frame_id, image_msg.header.stamp), device=self.device - ) - if not suc: - self.system_events["image_callback_cancled"] = { - "time": rospy.get_time(), - "value": "cancled due to pose_cam_in_base", - } - return - - # Prepare image projector - K, H, W = rc.ros_cam_info_to_tensors(info_msg, device=self.device) - image_projector = ImageProjector( - K=K, h=H, w=W, new_h=self.network_input_image_height, new_w=self.network_input_image_width - ) - - # Add image to base node - # convert image message to torch image - torch_image = rc.ros_image_to_torch(image_msg, device=self.device) - torch_image = image_projector.resize_image(torch_image) - - # Create mission node for the graph - mission_node = MissionNode( - timestamp=ts, - pose_base_in_world=pose_base_in_world, - pose_cam_in_base=pose_cam_in_base, - image=torch_image, - image_projector=image_projector, - correspondence=torch.zeros((1,)) if self.optical_flow_estimator_type != "sparse" else None, - camera_name=camera_options["name"], - use_for_training=camera_options["use_for_training"], - ) - - # Add node to graph - added_new_node = self.traversability_estimator.add_mission_node(mission_node) - - with SystemLevelContextGpuMonitor(self, "update_prediction"): - with SystemLevelContextTimer(self, "update_prediction"): - # Update prediction - self.traversability_estimator.update_prediction(mission_node) - - if self.mode == WVNMode.ONLINE or self.mode == WVNMode.DEBUG: - self.publish_predictions(mission_node, image_msg, info_msg, image_projector.scaled_camera_matrix) - - if self.mode == WVNMode.DEBUG: - # 
Publish current predictions - self.visualize_mission() - # Publish supervision data depending on the mode - self.visualize_debug() - - # If a new node was added, update the node is used to visualize the supervision signals - if added_new_node: - self.traversability_estimator.update_visualization_node() - - # Print callback time if required - if self.print_image_callback_time: - print(self.timer) - - self.system_events["image_callback_state"] = {"time": rospy.get_time(), "value": "executed successfully"} - - except Exception as e: - traceback.print_exc() - print("error image callback", e) - self.system_events["image_callback_state"] = {"time": rospy.get_time(), "value": f"failed to execute {e}"} - raise Exception("Error in image callback") - - @accumulate_memory - @accumulate_time - def publish_predictions( - self, mission_node: MissionNode, image_msg: Image, info_msg: CameraInfo, scaled_camera_matrix: torch.Tensor - ): - """Publish predictions for the current image - - Args: - mission_node (MissionNode): Mission node - """ - # Publish predictions - if mission_node is not None and self.mode != WVNMode.EXTRACT_LABELS: - # Get camera name - cam = mission_node.camera_name - - # Traversability - out_trav = torch.zeros(mission_node.feature_segments.shape, device=self.device) - out_conf = torch.zeros(mission_node.feature_segments.shape, device=self.device) - fs = mission_node.feature_segments.reshape(-1) - out_trav = out_trav.reshape(-1) - out_conf = out_conf.reshape(-1) - traversability = mission_node.prediction[:, 0] - - # Optionally rescale the traversability output before publishing - if self.scale_traversability: - # Compute ROC Threshold - if self.traversability_estimator._auxiliary_training_roc._update_count != 0: - fpr, tpr, thresholds = self.traversability_estimator._auxiliary_training_roc.compute() - index = torch.where(fpr > self.scale_traversability_max_fpr)[0][0] - threshold = thresholds[index] - self.traversability_estimator.scale_traversability_threshold = threshold - - # Apply piecewise linear scaling 0->0; threshold->0.5; 1->1 - traversability = traversability.clone() - m = traversability < threshold - # Scale untraversable - traversability[m] *= 0.5 / threshold - # Scale traversable - traversability[~m] -= threshold - traversability[~m] *= 0.5 / (1 - threshold) - traversability[~m] += 0.5 - traversability = traversability.clip(0, 1) - - out_trav = traversability[fs] - out_conf = mission_node.confidence[fs] - out_trav = out_trav.reshape(mission_node.feature_segments.shape) - out_conf = out_conf.reshape(mission_node.feature_segments.shape) - - # Color, resized image used as the network input - msg = rc.torch_to_ros_image(mission_node.image.cpu(), "rgb8") - msg.header = image_msg.header - msg.width = out_trav.shape[0] - msg.height = out_trav.shape[1] - self.camera_handler[cam]["input_pub"].publish(msg) - - # Output traversability - msg = rc.numpy_to_ros_image(out_trav.cpu().numpy(), "passthrough") - msg.header = image_msg.header - msg.width = out_trav.shape[0] - msg.height = out_trav.shape[1] - self.camera_handler[cam]["trav_pub"].publish(msg) - - # Output confidence - # out_conf[:,:] = 1 - # out_conf[int(out_conf.shape[0]/2):,:] = 0 - # out_conf[:,int(out_conf.shape[1]/2):] = 0 - msg = rc.numpy_to_ros_image(out_conf.cpu().numpy(), "passthrough") - msg.header = image_msg.header - msg.width = out_trav.shape[0] - msg.height = out_trav.shape[1] - self.camera_handler[cam]["conf_pub"].publish(msg) - - # Output camera info - # The header stays the same, only the image, width, K and P 
change due to the different resolution - info_msg.width = out_trav.shape[0] - info_msg.height = out_trav.shape[1] - info_msg.K = scaled_camera_matrix[0, :3, :3].cpu().numpy().flatten().tolist() - info_msg.P = scaled_camera_matrix[0, :3, :4].cpu().numpy().flatten().tolist() - self.camera_handler[cam]["info_pub"].publish(info_msg) - - @accumulate_memory - @accumulate_time - def visualize_proprioception(self): - """Publishes all the visualizations related to proprioceptive info, - like footprints and the sliding graph - """ - # Get current time for later - now = rospy.Time.now() - - proprio_graph_msg = Path() - proprio_graph_msg.header.frame_id = self.fixed_frame - proprio_graph_msg.header.stamp = now - - # Footprints - footprints_marker = Marker() - footprints_marker.id = 0 - footprints_marker.ns = "footprints" - footprints_marker.header.frame_id = self.fixed_frame - footprints_marker.header.stamp = now - footprints_marker.type = Marker.TRIANGLE_LIST - footprints_marker.action = Marker.ADD - footprints_marker.scale.x = 1 - footprints_marker.scale.y = 1 - footprints_marker.scale.z = 1 - footprints_marker.color.a = 1.0 - footprints_marker.pose.orientation.w = 1.0 - footprints_marker.pose.position.x = 0.0 - footprints_marker.pose.position.y = 0.0 - footprints_marker.pose.position.z = 0.0 - - last_points = [None, None] - for node in self.traversability_estimator.get_proprio_nodes(): - # Path - pose = PoseStamped() - pose.header.stamp = now - pose.header.frame_id = self.fixed_frame - pose.pose = rc.torch_to_ros_pose(node.pose_base_in_world) - proprio_graph_msg.poses.append(pose) - - # Color for traversability - r, g, b, _ = self.color_palette(node.traversability.item()) - c = ColorRGBA(r, g, b, 0.95) - - # Rainbow path - side_points = node.get_side_points() - - # if the last points are empty, fill and continue - if None in last_points: - for i in range(2): - last_points[i] = Point( - x=side_points[i, 0].item(), y=side_points[i, 1].item(), z=side_points[i, 2].item() - ) - continue - else: - # here we want to add 2 triangles: from the last saved points (lp) - # and the current side points (sp): - # triangle 1: [lp0, lp1, sp0] - # triangle 2: [lp1, sp0, sp1] - - points_to_add = [] - # add the last points points - for lp in last_points: - points_to_add.append(lp) - # Add first from new side points - points_to_add.append( - Point(x=side_points[i, 0].item(), y=side_points[i, 1].item(), z=side_points[i, 2].item()) - ) - # Add last of last points - points_to_add.append(last_points[0]) - # Add new side points and update last points - for i in range(2): - last_points[i] = Point( - x=side_points[i, 0].item(), y=side_points[i, 1].item(), z=side_points[i, 2].item() - ) - points_to_add.append(last_points[i]) - - # Add all the points and colors - for p in points_to_add: - footprints_marker.points.append(p) - footprints_marker.colors.append(c) - - # Untraversable plane - if node.is_untraversable: - untraversable_plane = node.get_untraversable_plane(grid_size=2) - N, D = untraversable_plane.shape - for n in [0, 1, 3, 2, 0, 3]: # this is a hack to show the triangles correctly - p = Point() - p.x = untraversable_plane[n, 0] - p.y = untraversable_plane[n, 1] - p.z = untraversable_plane[n, 2] - footprints_marker.points.append(p) - footprints_marker.colors.append(c) - - # Publish - if len(footprints_marker.points) % 3 != 0: - if self.verbose: - print(f"number of points for footprint is {len(footprints_marker.points)}") - return - self.pub_graph_footprints.publish(footprints_marker) - 
self.pub_debug_proprio_graph.publish(proprio_graph_msg) - - # Publish latest traversability - self.pub_instant_traversability.publish(self.supervision_generator.traversability) - self.system_events["visualize_proprioception"] = {"time": rospy.get_time(), "value": f"executed successfully"} - - @accumulate_memory - @accumulate_time - def visualize_mission(self): - """Publishes all the visualizations related to the mission graph""" - # Get current time for later - now = rospy.Time.now() - - # Publish mission graph - mission_graph_msg = Path() - mission_graph_msg.header.frame_id = self.fixed_frame - mission_graph_msg.header.stamp = now - - for node in self.traversability_estimator.get_mission_nodes(): - pose = PoseStamped() - pose.header.stamp = now - pose.header.frame_id = self.fixed_frame - pose.pose = rc.torch_to_ros_pose(node.pose_cam_in_world) - mission_graph_msg.poses.append(pose) - - self.pub_mission_graph.publish(mission_graph_msg) - - @accumulate_memory - @accumulate_time - def visualize_debug(self): - """Publishes all the debugging, slow visualizations""" - - # Get visualization node - vis_node = self.traversability_estimator.get_mission_node_for_visualization() - - # Publish predictions - if vis_node is not None and self.mode != WVNMode.EXTRACT_LABELS: - cam = vis_node.camera_name - ( - np_prediction_image, - np_uncertainty_image, - ) = self.traversability_estimator.plot_mission_node_prediction(vis_node) - - # self.pub_image_input.publish(rc.torch_to_ros_image(vis_node.image)) - self.camera_handler[cam]["debug"]["image_trav"].publish(rc.numpy_to_ros_image(np_prediction_image)) - self.camera_handler[cam]["debug"]["image_conf"].publish(rc.numpy_to_ros_image(np_uncertainty_image)) - - # Publish reprojections of last node in graph - if vis_node is not None: - cam = vis_node.camera_name - - np_labeled_image, np_mask_image = self.traversability_estimator.plot_mission_node_training(vis_node) - - if np_labeled_image is None or np_mask_image is None: - return - self.camera_handler[cam]["debug"]["image_labeled"].publish(rc.numpy_to_ros_image(np_labeled_image)) - self.camera_handler[cam]["debug"]["image_mask"].publish(rc.numpy_to_ros_image(np_mask_image)) - - -if __name__ == "__main__": - node_name = "wild_visual_navigation_node" - rospy.init_node("wild_visual_navigation_node") - wvn = WvnRosInterface() - rospy.spin() diff --git a/wild_visual_navigation_ros/scripts/wvn_feature_extractor_node.py b/wild_visual_navigation_ros/scripts/wvn_feature_extractor_node.py new file mode 100644 index 00000000..e6d4caa5 --- /dev/null +++ b/wild_visual_navigation_ros/scripts/wvn_feature_extractor_node.py @@ -0,0 +1,464 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
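# The new feature extractor node reads its configuration through OmegaConf
# structured configs filled from the ROS parameter server (see read_params()
# below). A minimal sketch of that pattern, with a hypothetical dataclass
# standing in for RosFeatureExtractorNodeParams:
from dataclasses import dataclass
from omegaconf import OmegaConf, read_write
import rospy

@dataclass
class ExampleNodeParams:  # hypothetical stand-in for the real params dataclass
    device: str = "cuda"
    verbose: bool = False

def read_params_from_server():
    """Fill the structured config from the node's private parameter namespace."""
    params = OmegaConf.structured(ExampleNodeParams)
    with read_write(params):
        for k in params.keys():
            # rospy.get_param raises KeyError for missing keys, so a misnamed
            # or missing parameter fails loudly at node startup.
            params[k] = rospy.get_param(f"~{k}")
    return params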
+# +from wild_visual_navigation import WVN_ROOT_DIR +from wild_visual_navigation.feature_extractor import FeatureExtractor +from wild_visual_navigation.cfg import ExperimentParams, RosFeatureExtractorNodeParams +from wild_visual_navigation.image_projector import ImageProjector +from wild_visual_navigation_msgs.msg import ImageFeatures +import wild_visual_navigation_ros.ros_converter as rc +from wild_visual_navigation_ros.scheduler import Scheduler +from wild_visual_navigation_ros.reload_rosparams import reload_rosparams +from wild_visual_navigation.model import get_model +from wild_visual_navigation.utils import ConfidenceGenerator +from wild_visual_navigation.utils import AnomalyLoss + +import rospy +from sensor_msgs.msg import Image, CameraInfo, CompressedImage +from std_msgs.msg import MultiArrayDimension + +import torch +import numpy as np +import torch.nn.functional as F +import signal +import sys +import traceback +from omegaconf import OmegaConf, read_write +from wild_visual_navigation.utils import Data +from os.path import join +from threading import Thread, Event +from prettytable import PrettyTable +from termcolor import colored +import os + + +class WvnFeatureExtractor: + def __init__(self, node_name): + # Read params + self.read_params() + + # Initialize variables + self._node_name = node_name + self._load_model_counter = 0 + + # Timers to control the rate of the subscriber + self._last_checkpoint_ts = rospy.get_time() + + # Setup modules + self._feature_extractor = FeatureExtractor( + self._ros_params.device, + segmentation_type=self._ros_params.segmentation_type, + feature_type=self._ros_params.feature_type, + patch_size=self._ros_params.dino_patch_size, + backbone_type=self._ros_params.dino_backbone, + input_size=self._ros_params.network_input_image_height, + slic_num_components=self._ros_params.slic_num_components, + ) + + # Load model + # We manually update the input size to the models depending on the chosen features + self._params.model.simple_mlp_cfg.input_size = self._feature_extractor.feature_dim + self._params.model.double_mlp_cfg.input_size = self._feature_extractor.feature_dim + self._params.model.simple_gcn_cfg.input_size = self._feature_extractor.feature_dim + self._params.model.linear_rnvp_cfg.input_size = self._feature_extractor.feature_dim + self._model = get_model(self._params.model).to(self._ros_params.device) + self._model.eval() + + if self.anomaly_detection: + self._confidence_generator = ConfidenceGenerator( + method=self._params.loss_anomaly.method, std_factor=self._params.loss_anomaly.confidence_std_factor + ) + + else: + self._confidence_generator = ConfidenceGenerator( + method=self._params.loss.method, std_factor=self._params.loss.confidence_std_factor + ) + self._log_data = {} + self.setup_ros() + + # Setup verbosity levels + if self._ros_params.verbose: + + self._status_thread_stop_event = Event() + self._status_thread = Thread(target=self.status_thread_loop, name="status") + self._run_status_thread = True + self._status_thread.start() + + rospy.on_shutdown(self.shutdown_callback) + signal.signal(signal.SIGINT, self.shutdown_callback) + signal.signal(signal.SIGTERM, self.shutdown_callback) + + def shutdown_callback(self, *args, **kwargs): + self._run_status_thread = False + self._status_thread_stop_event.set() + self._status_thread.join() + + rospy.signal_shutdown(f"Wild Visual Navigation Feature Extraction killed {args}") + sys.exit(0) + + def read_params(self): + """Reads all the parameters from the parameter server""" + self._params = 
OmegaConf.structured(ExperimentParams) + self._ros_params = OmegaConf.structured(RosFeatureExtractorNodeParams) + + # Override the empty dataclass with values from rosparm server + with read_write(self._ros_params): + for k in self._ros_params.keys(): + self._ros_params[k] = rospy.get_param(f"~{k}") + + with read_write(self._params): + self._params.loss.confidence_std_factor = self._ros_params.confidence_std_factor + self._params.loss_anomaly.confidence_std_factor = self._ros_params.confidence_std_factor + + self.anomaly_detection = self._params.model.name == "LinearRnvp" + + def setup_ros(self, setup_fully=True): + """Main function to setup ROS-related stuff: publishers, subscribers and services""" + # Image callback + + self._camera_handler = {} + self._camera_scheduler = Scheduler() + + if self._ros_params.verbose: + # DEBUG Logging + self._log_data[f"time_last_model"] = -1 + self._log_data[f"nr_model_updates"] = -1 + + self._last_image_ts = {} + + for cam in self._ros_params.camera_topics: + self._last_image_ts[cam] = rospy.get_time() + if self._ros_params.verbose: + # DEBUG Logging + self._log_data[f"nr_images_{cam}"] = 0 + self._log_data[f"time_last_image_{cam}"] = -1 + + # Initialize camera handler for given cam + self._camera_handler[cam] = {} + # Store camera name + self._ros_params.camera_topics[cam]["name"] = cam + + # Add to scheduler + self._camera_scheduler.add_process(cam, self._ros_params.camera_topics[cam]["scheduler_weight"]) + + # Camera info + t = self._ros_params.camera_topics[cam]["info_topic"] + rospy.loginfo(f"[{self._node_name}] Waiting for camera info topic {t}") + camera_info_msg = rospy.wait_for_message(self._ros_params.camera_topics[cam]["info_topic"], CameraInfo) + rospy.loginfo(f"[{self._node_name}] Done") + K, H, W = rc.ros_cam_info_to_tensors(camera_info_msg, device=self._ros_params.device) + + self._camera_handler[cam]["camera_info"] = camera_info_msg + self._camera_handler[cam]["K"] = K + self._camera_handler[cam]["H"] = H + self._camera_handler[cam]["W"] = W + + image_projector = ImageProjector( + K=self._camera_handler[cam]["K"], + h=self._camera_handler[cam]["H"], + w=self._camera_handler[cam]["W"], + new_h=self._ros_params.network_input_image_height, + new_w=self._ros_params.network_input_image_width, + ) + msg = self._camera_handler[cam]["camera_info"] + msg.width = self._ros_params.network_input_image_width + msg.height = self._ros_params.network_input_image_height + msg.K = image_projector.scaled_camera_matrix[0, :3, :3].cpu().numpy().flatten().tolist() + msg.P = image_projector.scaled_camera_matrix[0, :3, :4].cpu().numpy().flatten().tolist() + + with read_write(self._ros_params): + self._camera_handler[cam]["camera_info_msg_out"] = msg + self._camera_handler[cam]["image_projector"] = image_projector + + # Set subscribers + base_topic = self._ros_params.camera_topics[cam]["image_topic"].replace("/compressed", "") + is_compressed = self._ros_params.camera_topics[cam]["image_topic"] != base_topic + if is_compressed: + # TODO study the effect of the buffer size + image_sub = rospy.Subscriber( + self._ros_params.camera_topics[cam]["image_topic"], + CompressedImage, + self.image_callback, + callback_args=cam, + queue_size=1, + ) + else: + image_sub = rospy.Subscriber( + self._ros_params.camera_topics[cam]["image_topic"], + Image, + self.image_callback, + callback_args=cam, + queue_size=1, + ) + self._camera_handler[cam]["image_sub"] = image_sub + + # Set publishers + trav_pub = rospy.Publisher( + f"/wild_visual_navigation_node/{cam}/traversability", + 
Image, + queue_size=1, + ) + info_pub = rospy.Publisher( + f"/wild_visual_navigation_node/{cam}/camera_info", + CameraInfo, + queue_size=1, + ) + self._camera_handler[cam]["trav_pub"] = trav_pub + self._camera_handler[cam]["info_pub"] = info_pub + if self.anomaly_detection and self._ros_params.camera_topics[cam]["publish_confidence"]: + rospy.logwarn(f"[{self._node_name}] Anomaly detection is enabled, forcing publish_confidence to False") + self._ros_params.camera_topics[cam]["publish_confidence"] = False + + if self._ros_params.camera_topics[cam]["publish_input_image"]: + input_pub = rospy.Publisher( + f"/wild_visual_navigation_node/{cam}/image_input", + Image, + queue_size=1, + ) + self._camera_handler[cam]["input_pub"] = input_pub + + if self._ros_params.camera_topics[cam]["publish_confidence"]: + conf_pub = rospy.Publisher( + f"/wild_visual_navigation_node/{cam}/confidence", + Image, + queue_size=1, + ) + self._camera_handler[cam]["conf_pub"] = conf_pub + + if self._ros_params.camera_topics[cam]["use_for_training"]: + imagefeat_pub = rospy.Publisher( + f"/wild_visual_navigation_node/{cam}/feat", + ImageFeatures, + queue_size=1, + ) + self._camera_handler[cam]["imagefeat_pub"] = imagefeat_pub + + def status_thread_loop(self): + rate = rospy.Rate(self._ros_params.status_thread_rate) + # Status loop + while self._run_status_thread: + self._status_thread_stop_event.wait(timeout=0.01) + if self._status_thread_stop_event.is_set(): + rospy.logwarn(f"[{self._node_name}] Stopped status thread") + break + + t = rospy.get_time() + x = PrettyTable() + x.field_names = ["Key", "Value"] + + for k, v in self._log_data.items(): + if "time" in k: + d = t - v + if d < 0: + c = "red" + elif d < 0.2: + c = "green" + elif d < 1.0: + c = "yellow" + else: + c = "red" + x.add_row([k, colored(round(d, 2), c)]) + else: + x.add_row([k, v]) + print(f"[{self._node_name}]\n{x}") + try: + rate.sleep() + except Exception: + rate = rospy.Rate(self._ros_params.status_thread_rate) + print(f"[{self._node_name}] Ignored jump back in time!") + self._status_thread_stop_event.clear() + + @torch.no_grad() + def image_callback(self, image_msg: Image, cam: str): # info_msg: CameraInfo + """Main callback to process incoming images. 
+ + Args: + image_msg (sensor_msgs/Image): Incoming image + info_msg (sensor_msgs/CameraInfo): Camera info message associated to the image + cam (str): Camera name + """ + # Check the rate + ts = image_msg.header.stamp.to_sec() + if abs(ts - self._last_image_ts[cam]) < 1.0 / self._ros_params.image_callback_rate: + return + + # Check the scheduler + if self._camera_scheduler.get() != cam: + return + else: + if self._ros_params.verbose: + rospy.loginfo(f"[{self._node_name}] Image callback: {cam} -> Process") + + self._last_image_ts[cam] = ts + + # If all the checks are passed, process the image + try: + if self._ros_params.verbose: + # DEBUG Logging + self._log_data[f"nr_images_{cam}"] += 1 + self._log_data[f"time_last_image_{cam}"] = rospy.get_time() + + # Update model from file if possible + self.load_model(image_msg.header.stamp) + + # Convert image message to torch image + torch_image = rc.ros_image_to_torch(image_msg, device=self._ros_params.device) + torch_image = self._camera_handler[cam]["image_projector"].resize_image(torch_image) + C, H, W = torch_image.shape + + # Extract features + _, feat, seg, center, dense_feat = self._feature_extractor.extract( + img=torch_image[None], + return_centers=False, + return_dense_features=True, + n_random_pixels=100, + ) + + # Forward pass to predict traversability + if self._ros_params.prediction_per_pixel: + # Pixel-wise traversability prediction using the dense features + data = Data(x=dense_feat[0].permute(1, 2, 0).reshape(-1, dense_feat.shape[1])) + else: + # input_feat = dense_feat[0].permute(1, 2, 0).reshape(-1, dense_feat.shape[1]) + # Segment-wise traversability prediction using the average feature per segment + input_feat = feat[seg.reshape(-1)] + data = Data(x=input_feat) + + # Predict traversability per feature + prediction = self._model.forward(data) + + if not self.anomaly_detection: + out_trav = prediction.reshape(H, W, -1)[:, :, 0] + else: + losses = prediction["logprob"].sum(1) + prediction["log_det"] + confidence = self._confidence_generator.inference_without_update(x=-losses) + trav = confidence + out_trav = trav.reshape(H, W, -1)[:, :, 0] + + msg = rc.numpy_to_ros_image(out_trav.cpu().numpy(), "passthrough") + msg.header = image_msg.header + msg.width = out_trav.shape[0] + msg.height = out_trav.shape[1] + self._camera_handler[cam]["trav_pub"].publish(msg) + + msg = self._camera_handler[cam]["camera_info_msg_out"] + msg.header = image_msg.header + self._camera_handler[cam]["info_pub"].publish(msg) + + # Publish image + if self._ros_params.camera_topics[cam]["publish_input_image"]: + msg = rc.numpy_to_ros_image( + (torch_image.permute(1, 2, 0) * 255).cpu().numpy().astype(np.uint8), + "rgb8", + ) + msg.header = image_msg.header + msg.width = torch_image.shape[1] + msg.height = torch_image.shape[2] + self._camera_handler[cam]["input_pub"].publish(msg) + + # Publish confidence + if self._ros_params.camera_topics[cam]["publish_confidence"]: + loss_reco = F.mse_loss(prediction[:, 1:], data.x, reduction="none").mean(dim=1) + confidence = self._confidence_generator.inference_without_update(x=loss_reco) + out_confidence = confidence.reshape(H, W) + msg = rc.numpy_to_ros_image(out_confidence.cpu().numpy(), "passthrough") + msg.header = image_msg.header + msg.width = out_confidence.shape[0] + msg.height = out_confidence.shape[1] + self._camera_handler[cam]["conf_pub"].publish(msg) + + # Publish features and feature_segments + if self._ros_params.camera_topics[cam]["use_for_training"]: + msg = ImageFeatures() + msg.header = image_msg.header 
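# The ImageFeatures message assembled here carries the per-segment feature
# matrix flattened into a multi-array with an (n, feature_dim) layout; the
# layout dimensions written below are what a consumer (e.g. the learning node)
# can use to recover the shape. Minimal decoding sketch, where feat_msg stands
# for a received wild_visual_navigation_msgs/ImageFeatures:
import numpy as np

def unpack_features(feat_msg):
    """Rebuild the (n_segments, feature_dim) array from the flattened message data."""
    n = feat_msg.features.layout.dim[0].size
    feature_dim = feat_msg.features.layout.dim[1].size
    return np.asarray(feat_msg.features.data, dtype=np.float32).reshape(n, feature_dim)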
+ msg.feature_segments = rc.numpy_to_ros_image(seg.cpu().numpy().astype(np.int32), "passthrough") + msg.feature_segments.header = image_msg.header + feat_np = feat.cpu().numpy() + + mad1 = MultiArrayDimension() + mad1.label = "n" + mad1.size = feat_np.shape[0] + mad1.stride = feat_np.shape[0] * feat_np.shape[1] + + mad2 = MultiArrayDimension() + mad2.label = "feat" + mad2.size = feat_np.shape[1] + mad2.stride = feat_np.shape[1] + + msg.features.data = feat_np.flatten().tolist() + msg.features.layout.dim.append(mad1) + msg.features.layout.dim.append(mad2) + self._camera_handler[cam]["imagefeat_pub"].publish(msg) + + except Exception as e: + traceback.print_exc() + rospy.logerr(f"[{self._node_name}] Error in image callback: {e}") + self._log_data["image_callback_state"] = { + "time": rospy.get_time(), + "value": f"failed to execute {e}", + } + raise Exception("Error in image callback") + + # Step scheduler + self._camera_scheduler.step() + + def load_model(self, stamp): + """Method to load the new model weights to perform inference on the incoming images + + Args: + stamp (rospy.Time): Stamp of the image that triggered the reload check + """ + ts = stamp.to_sec() + if abs(ts - self._last_checkpoint_ts) < 1.0 / self._ros_params.load_save_checkpoint_rate: + return + + self._last_checkpoint_ts = ts + + # self._load_model_counter += 1 + # if self._load_model_counter % 10 == 0: + p = join(WVN_ROOT_DIR, ".tmp_state_dict.pt") + # p = join(WVN_ROOT_DIR,"assets/checkpoints/mountain_bike_trail_fpr_0.25.pt") + + if os.path.exists(p): + new_model_state_dict = torch.load(p) + k = list(self._model.state_dict().keys())[-1] + + # Check whether the key is in the state dict - this may not be the case if the model type was switched + # Assumption: the last key of the state_dict is unique and sufficient to detect that the model has changed + if k in new_model_state_dict: + # check if the model has changed + if (self._model.state_dict()[k] != new_model_state_dict[k]).any(): + if self._ros_params.verbose: + self._log_data[f"time_last_model"] = rospy.get_time() + self._log_data[f"nr_model_updates"] += 1 + + self._model.load_state_dict(new_model_state_dict, strict=False) + if "confidence_generator" in new_model_state_dict.keys(): + cg = new_model_state_dict["confidence_generator"] + self._confidence_generator.var = cg["var"] + self._confidence_generator.mean = cg["mean"] + self._confidence_generator.std = cg["std"] + + if self._ros_params.verbose: + m, s, v = cg["mean"].item(), cg["std"].item(), cg["var"].item() + rospy.loginfo(f"[{self._node_name}] Loaded confidence generator: mean {m}, std {s}, var {v}") + + else: + if self._ros_params.verbose: + rospy.logerr(f"[{self._node_name}] Model Loading Failed") + + +if __name__ == "__main__": + node_name = "wvn_feature_extractor_node" + rospy.init_node(node_name) + + reload_rosparams( + enabled=rospy.get_param("~reload_default_params", True), + node_name=node_name, + camera_cfg="wide_angle_dual", + ) + + wvn = WvnFeatureExtractor(node_name) + rospy.spin() diff --git a/wild_visual_navigation_ros/scripts/wvn_learning_node.py b/wild_visual_navigation_ros/scripts/wvn_learning_node.py new file mode 100644 index 00000000..9bcb3a55 --- /dev/null +++ b/wild_visual_navigation_ros/scripts/wvn_learning_node.py @@ -0,0 +1,962 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
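# The feature extractor's load_model() above polls WVN_ROOT_DIR/.tmp_state_dict.pt
# and hot-swaps its weights whenever a parameter tensor changed. The producer side
# is not visible in this hunk; a hypothetical writer that would satisfy that reader
# could look like this (function name and call site are illustrative only):
import torch
from os.path import join
from wild_visual_navigation import WVN_ROOT_DIR

def publish_checkpoint(model, confidence_generator):
    """Write the current weights to the path polled by the feature extractor."""
    state = model.state_dict()
    # load_model() also restores these running statistics if the key is present;
    # the extra key is tolerated on the reader side via load_state_dict(strict=False).
    state["confidence_generator"] = {
        "mean": confidence_generator.mean,
        "std": confidence_generator.std,
        "var": confidence_generator.var,
    }
    torch.save(state, join(WVN_ROOT_DIR, ".tmp_state_dict.pt"))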
+# +from wild_visual_navigation import WVN_ROOT_DIR +from wild_visual_navigation.image_projector import ImageProjector +from wild_visual_navigation.supervision_generator import SupervisionGenerator +from wild_visual_navigation.traversability_estimator import TraversabilityEstimator +from wild_visual_navigation.traversability_estimator import MissionNode, SupervisionNode +import wild_visual_navigation_ros.ros_converter as rc +from wild_visual_navigation_ros.reload_rosparams import reload_rosparams +from wild_visual_navigation_msgs.msg import RobotState, SystemState, ImageFeatures +from wild_visual_navigation.visu import LearningVisualizer +from wild_visual_navigation_msgs.srv import ( + LoadCheckpoint, + SaveCheckpoint, + LoadCheckpointResponse, + SaveCheckpointResponse, +) +from wild_visual_navigation.utils import WVNMode, create_experiment_folder +from wild_visual_navigation.cfg import ExperimentParams, RosLearningNodeParams + +from std_srvs.srv import SetBool, Trigger, TriggerResponse +from geometry_msgs.msg import PoseStamped, Point, TwistStamped +from nav_msgs.msg import Path +from sensor_msgs.msg import Image, CameraInfo +from std_msgs.msg import ColorRGBA, Float32 +from visualization_msgs.msg import Marker +import tf2_ros +import rospy +import message_filters + +from pytictac import ClassTimer, ClassContextTimer, accumulate_time +from omegaconf import OmegaConf, read_write +from threading import Thread, Event +import os +import seaborn as sns +import torch +import numpy as np +from typing import Optional +import traceback +import signal +import sys +import time + + +def time_func(): + return rospy.get_time() + + +class WvnLearning: + def __init__(self, node_name): + # Timers to control the rate of the publishers + self._last_image_ts = time_func() + self._last_supervision_ts = time_func() + self._last_checkpoint_ts = time_func() + self._setup_ready = False + + # Prepare variables + self._node_name = node_name + + # Read params + self.read_params() + + # Initialize camera handler for subscription/publishing + self._system_events = {} + + # Setup ros + self.setup_ros(setup_fully=self._ros_params.mode != WVNMode.EXTRACT_LABELS) + + # Visualization + self._color_palette = sns.color_palette(self._ros_params.colormap, as_cmap=True) + + # Setup Mission Folder + model_path = create_experiment_folder(self._params) + + with read_write(self._params): + self._params.general.model_path = model_path + + # Initialize traversability estimator + self._traversability_estimator = TraversabilityEstimator( + params=self._params, + device=self._ros_params.device, + max_distance=self._ros_params.traversability_radius, + image_distance_thr=self._ros_params.image_graph_dist_thr, + supervision_distance_thr=self._ros_params.supervision_graph_dist_thr, + min_samples_for_training=self._ros_params.min_samples_for_training, + vis_node_index=self._ros_params.vis_node_index, + mode=self._ros_params.mode, + extraction_store_folder=self._ros_params.extraction_store_folder, + anomaly_detection=self.anomaly_detection, + ) + + # Initialize traversability generator to process velocity commands + self._supervision_generator = SupervisionGenerator( + device=self._ros_params.device, + kf_process_cov=0.1, + kf_meas_cov=10, + kf_outlier_rejection="huber", + kf_outlier_rejection_delta=0.5, + sigmoid_slope=20, + sigmoid_cutoff=0.25, # 0.2 + untraversable_thr=self._ros_params.untraversable_thr, # 0.1 + time_horizon=0.05, + graph_max_length=1, + ) + + # Setup Timer if needed + self._timer = ClassTimer( + objects=[ + self, + 
self._traversability_estimator, + self._traversability_estimator._visualizer, + self._supervision_generator, + ], + names=[ + "WVN", + "TraversabilityEstimator", + "Visualizer", + "SupervisionGenerator", + ], + enabled=( + self._ros_params.print_image_callback_time + or self._ros_params.print_supervision_callback_time + or self._ros_params.log_time + ), + ) + + # Register shutdown callbacks + rospy.on_shutdown(self.shutdown_callback) + signal.signal(signal.SIGINT, self.shutdown_callback) + signal.signal(signal.SIGTERM, self.shutdown_callback) + + # Launch processes + print("-" * 80) + self._setup_ready = True + rospy.loginfo(f"[{self._node_name}] Launching [learning] thread") + if self._ros_params.mode != WVNMode.EXTRACT_LABELS: + self._learning_thread_stop_event = Event() + self.learning_thread = Thread(target=self.learning_thread_loop, name="learning") + self.learning_thread.start() + + # self.logging_thread_stop_event = Event() + # self.logging_thread = Thread(target=self.logging_thread_loop, name="logging") + # self.logging_thread.start() + rospy.loginfo(f"[{self._node_name}] [WVN] System ready") + + def shutdown_callback(self, *args, **kwargs): + # Write stuff to files + rospy.logwarn("Shutdown callback called") + if self._ros_params.mode != WVNMode.EXTRACT_LABELS: + self._learning_thread_stop_event.set() + # self.logging_thread_stop_event.set() + + print(f"[{self._node_name}] Storing learned checkpoint...", end="") + self._traversability_estimator.save_checkpoint(self._params.general.model_path, "last_checkpoint.pt") + print("done") + + if self._ros_params.log_time: + print(f"[{self._node_name}] Storing timer data...", end="") + self._timer.store(folder=self._params.general.model_path) + print("done") + + print(f"[{self._node_name}] Joining learning thread...", end="") + if self._ros_params.mode != WVNMode.EXTRACT_LABELS: + self._learning_thread_stop_event.set() + self.learning_thread.join() + + # self.logging_thread_stop_event.set() + # self.logging_thread.join() + print("done") + + rospy.signal_shutdown(f"[{self._node_name}] Wild Visual Navigation killed {args}") + sys.exit(0) + + @accumulate_time + def read_params(self): + """Reads all the parameters from the parameter server""" + self._params = OmegaConf.structured(ExperimentParams) + self._ros_params = OmegaConf.structured(RosLearningNodeParams) + + # Override the empty dataclass with values from ros parmeter server + with read_write(self._ros_params): + for k in self._ros_params.keys(): + self._ros_params[k] = rospy.get_param(f"~{k}") + + self._ros_params.robot_height = rospy.get_param("~robot_height") # TODO robot_height currently not used + + with read_write(self._ros_params): + self._ros_params.mode = WVNMode.from_string(self._ros_params.mode) + + with read_write(self._params): + self._params.general.name = self._ros_params.mission_name + self._params.general.timestamp = self._ros_params.mission_timestamp + self._params.general.log_confidence = self._ros_params.log_confidence + self._params.loss.confidence_std_factor = self._ros_params.confidence_std_factor + self._params.loss.w_temp = 0 + + # Parse operation modes + if self._ros_params.mode == WVNMode.ONLINE: + rospy.logwarn( + f"[{self._node_name}] WARNING: online_mode enabled. 
The graph will not store any debug/training data such as images\n" + ) + + elif self._ros_params.mode == WVNMode.EXTRACT_LABELS: + with read_write(self._ros_params): + # TODO verify if this is needed + self._ros_params.image_callback_rate = 3 + self._ros_params.supervision_callback_rate = 4 + self._ros_params.image_graph_dist_thr = 0.2 + self._ros_params.supervision_graph_dist_thr = 0.1 + os.makedirs( + os.path.join(self._ros_params.extraction_store_folder, "image"), + exist_ok=True, + ) + os.makedirs( + os.path.join(self._ros_params.extraction_store_folder, "supervision_mask"), + exist_ok=True, + ) + + self._step = -1 + self._step_time = time_func() + self.anomaly_detection = self._params.model.name == "LinearRnvp" + + def setup_ros(self, setup_fully=True): + """Main function to setup ROS-related stuff: publishers, subscribers and services""" + if setup_fully: + # Initialize TF listener + self.tf_buffer = tf2_ros.Buffer(cache_time=rospy.Duration(30.0)) + self.tf_listener = tf2_ros.TransformListener(self.tf_buffer) + + # Robot state callback + robot_state_sub = message_filters.Subscriber(self._ros_params.robot_state_topic, RobotState) + cache1 = message_filters.Cache(robot_state_sub, 10) # noqa: F841 + desired_twist_sub = message_filters.Subscriber(self._ros_params.desired_twist_topic, TwistStamped) + cache2 = message_filters.Cache(desired_twist_sub, 10) # noqa: F841 + + self._robot_state_sub = message_filters.ApproximateTimeSynchronizer( + [robot_state_sub, desired_twist_sub], queue_size=10, slop=0.5 + ) + + rospy.loginfo( + f"[{self._node_name}] Start waiting for RobotState topic {self._ros_params.robot_state_topic} being published!" + ) + rospy.wait_for_message(self._ros_params.robot_state_topic, RobotState) + rospy.loginfo( + f"[{self._node_name}] Start waiting for TwistStamped topic {self._ros_params.desired_twist_topic} being published!" 
+ ) + rospy.wait_for_message(self._ros_params.desired_twist_topic, TwistStamped) + self._robot_state_sub.registerCallback(self.robot_state_callback) + + self._camera_handler = {} + # Image callback + for cam in self._ros_params.camera_topics: + # Initialize camera handler for given cam + self._camera_handler[cam] = {} + # Store camera name + self._ros_params.camera_topics[cam]["name"] = cam + + # Set subscribers + if self._ros_params.mode == WVNMode.DEBUG: + # In debug mode additionally send the image to the callback function + self._visualizer = LearningVisualizer() + + imagefeat_sub = message_filters.Subscriber( + f"/wild_visual_navigation_node/{cam}/feat", ImageFeatures + ) + info_sub = message_filters.Subscriber(f"/wild_visual_navigation_node/{cam}/camera_info", CameraInfo) + image_sub = message_filters.Subscriber(f"/wild_visual_navigation_node/{cam}/image_input", Image) + sync = message_filters.ApproximateTimeSynchronizer( + [imagefeat_sub, info_sub, image_sub], queue_size=4, slop=0.5 + ) + sync.registerCallback(self.imagefeat_callback, self._ros_params.camera_topics[cam]) + + last_image_overlay_pub = rospy.Publisher( + f"/wild_visual_navigation_node/{cam}/debug/last_node_image_overlay", + Image, + queue_size=10, + ) + + self._camera_handler[cam]["debug"] = {} + self._camera_handler[cam]["debug"]["image_overlay"] = last_image_overlay_pub + + else: + print(f"/wild_visual_navigation_node/{cam}/feat") + imagefeat_sub = message_filters.Subscriber( + f"/wild_visual_navigation_node/{cam}/feat", ImageFeatures + ) + info_sub = message_filters.Subscriber(f"/wild_visual_navigation_node/{cam}/camera_info", CameraInfo) + sync = message_filters.ApproximateTimeSynchronizer( + [imagefeat_sub, info_sub], queue_size=4, slop=0.5 + ) + sync.registerCallback(self.imagefeat_callback, self._ros_params.camera_topics[cam]) + + # Wait for features message to determine the input size of the model + cam = list(self._ros_params.camera_topics.keys())[0] + + for cam in self._ros_params.camera_topics: + rospy.loginfo(f"[{self._node_name}] Waiting for feat topic {cam}...") + if self._ros_params.camera_topics[cam]["use_for_training"]: + feat_msg = rospy.wait_for_message(f"/wild_visual_navigation_node/{cam}/feat", ImageFeatures) + + feature_dim = int(feat_msg.features.layout.dim[1].size) + # Modify the parameters + with read_write(self._params): + self._params.model.simple_mlp_cfg.input_size = feature_dim + self._params.model.double_mlp_cfg.input_size = feature_dim + self._params.model.simple_gcn_cfg.input_size = feature_dim + self._params.model.linear_rnvp_cfg.input_size = feature_dim + rospy.loginfo(f"[{self._node_name}] Done") + + # 3D outputs + self._pub_debug_supervision_graph = rospy.Publisher( + "/wild_visual_navigation_node/supervision_graph", Path, queue_size=10 + ) + self._pub_mission_graph = rospy.Publisher("/wild_visual_navigation_node/mission_graph", Path, queue_size=10) + self._pub_graph_footprints = rospy.Publisher( + "/wild_visual_navigation_node/graph_footprints", Marker, queue_size=10 + ) + # 1D outputs + self._pub_instant_traversability = rospy.Publisher( + "/wild_visual_navigation_node/instant_traversability", + Float32, + queue_size=10, + ) + self._pub_system_state = rospy.Publisher( + "/wild_visual_navigation_node/system_state", SystemState, queue_size=10 + ) + + # Services + # Like, reset graph or the like + self._save_checkpt_service = rospy.Service("~save_checkpoint", SaveCheckpoint, self.save_checkpoint_callback) + self._load_checkpt_service = rospy.Service("~load_checkpoint", LoadCheckpoint, 
self.load_checkpoint_callback) + + self._pause_learning_service = rospy.Service("~pause_learning", SetBool, self.pause_learning_callback) + self._reset_service = rospy.Service("~reset", Trigger, self.reset_callback) + + @accumulate_time + def learning_thread_loop(self): + """This implements the main thread that runs the training procedure + We can only set the rate using rosparam + """ + # Set rate + rate = rospy.Rate(self._ros_params.learning_thread_rate) + # Learning loop + while True: + self._system_events["learning_thread_loop"] = { + "time": time_func(), + "value": "running", + } + self._learning_thread_stop_event.wait(timeout=0.01) + if self._learning_thread_stop_event.is_set(): + rospy.logwarn("Stopped learning thread") + break + + # Optimize model + with ClassContextTimer(parent_obj=self, block_name="training_step_time"): + res = self._traversability_estimator.train() + + if self._step != self._traversability_estimator.step: + self._step_time = time_func() + self._step = self._traversability_estimator.step + + # Publish loss + system_state = SystemState() + for k in res.keys(): + if hasattr(system_state, k): + setattr(system_state, k, res[k]) + + system_state.pause_learning = self._traversability_estimator.pause_learning + system_state.mode = self._ros_params.mode.value + system_state.step = self._step + self._pub_system_state.publish(system_state) + + # Get current weights + new_model_state_dict = self._traversability_estimator._model.state_dict() + + # Check the rate + ts = time_func() + if abs(ts - self._last_checkpoint_ts) > 1.0 / self._ros_params.load_save_checkpoint_rate: + + cg = self._traversability_estimator._traversability_loss._confidence_generator + new_model_state_dict["confidence_generator"] = cg.get_dict() + + fn = os.path.join(WVN_ROOT_DIR, ".tmp_state_dict.pt") + if os.path.exists(fn): + os.remove(fn) + torch.save(new_model_state_dict, fn) + self._last_checkpoint_ts = ts + print( + "Update model. 
Valid Nodes: ", + self._traversability_estimator._mission_graph.get_num_valid_nodes(), + " steps: ", + self._traversability_estimator._step, + ) + + rate.sleep() + + self._system_events["learning_thread_loop"] = { + "time": time_func(), + "value": "finished", + } + self._learning_thread_stop_event.clear() + + def logging_thread_loop(self): + rate = rospy.Rate(self._ros_params.logging_thread_rate) + + # Learning loop + while True: + self._learning_thread_stop_event.wait(timeout=0.01) + if self._learning_thread_stop_event.is_set(): + rospy.logwarn("Stopped logging thread") + break + + current_time = time_func() + tmp = self._system_events.copy() + rospy.loginfo("System Events:") + for k, v in tmp.items(): + value = v["value"] + msg = ( + (k + ": ").ljust(35, " ") + + (str(round(current_time - v["time"], 4)) + "s ").ljust(10, " ") + + f" {value}" + ) + rospy.loginfo(msg) + rate.sleep() + rospy.loginfo("--------------") + self._learning_thread_stop_event.clear() + + @accumulate_time + def robot_state_callback(self, state_msg, desired_twist_msg: TwistStamped): + """Main callback to process supervision info (robot state) + + Args: + state_msg (wild_visual_navigation_msgs/RobotState): Robot state message + desired_twist_msg (geometry_msgs/TwistStamped): Desired twist message + """ + if not self._setup_ready: + return + + self._system_events["robot_state_callback_received"] = { + "time": time_func(), + "value": "message received", + } + try: + ts = state_msg.header.stamp.to_sec() + if abs(ts - self._last_supervision_ts) < 1.0 / self._ros_params.supervision_callback_rate: + self._system_events["robot_state_callback_canceled"] = { + "time": time_func(), + "value": "canceled due to rate", + } + return + self._last_supervision_ts = ts + + # Query transforms from TF + success, pose_base_in_world = rc.ros_tf_to_torch( + self.query_tf( + self._ros_params.fixed_frame, + self._ros_params.base_frame, + state_msg.header.stamp, + ), + device=self._ros_params.device, + ) + if not success: + self._system_events["robot_state_callback_canceled"] = { + "time": time_func(), + "value": "canceled due to pose_base_in_world", + } + return + + success, pose_footprint_in_base = rc.ros_tf_to_torch( + self.query_tf( + self._ros_params.base_frame, + self._ros_params.footprint_frame, + state_msg.header.stamp, + ), + device=self._ros_params.device, + ) + if not success: + self._system_events["robot_state_callback_canceled"] = { + "time": time_func(), + "value": "canceled due to pose_footprint_in_base", + } + return + + # The footprint requires a correction: we use the same orientation as the base + pose_footprint_in_base[:3, :3] = torch.eye(3, device=self._ros_params.device) + + # Convert state to tensor + supervision_tensor, supervision_labels = rc.wvn_robot_state_to_torch( + state_msg, device=self._ros_params.device + ) + current_twist_tensor = rc.twist_stamped_to_torch(state_msg.twist, device=self._ros_params.device) + desired_twist_tensor = rc.twist_stamped_to_torch(desired_twist_msg, device=self._ros_params.device) + + # Update traversability + ( + traversability, + traversability_var, + is_untraversable, + ) = self._supervision_generator.update_velocity_tracking( + current_twist_tensor, desired_twist_tensor, velocities=["vx", "vy"] + ) + + # Create supervision node for the graph + supervision_node = SupervisionNode( + timestamp=ts, + pose_base_in_world=pose_base_in_world, + pose_footprint_in_base=pose_footprint_in_base, + twist_in_base=current_twist_tensor, + desired_twist_in_base=desired_twist_tensor, + 
width=self._ros_params.robot_width, + length=self._ros_params.robot_length, + height=self._ros_params.robot_height, + supervision=supervision_tensor, + traversability=traversability, + traversability_var=traversability_var, + is_untraversable=is_untraversable, + ) + + # Add node to the graph + self._traversability_estimator.add_supervision_node(supervision_node) + + if self._ros_params.mode == WVNMode.DEBUG or self._ros_params.mode == WVNMode.ONLINE: + self.visualize_supervision() + + if self._ros_params.print_supervision_callback_time: + print(f"[{self._node_name}]\n{self._timer}") + + self._system_events["robot_state_callback_state"] = { + "time": time_func(), + "value": "executed successfully", + } + + except Exception as e: + traceback.print_exc() + rospy.logerr(f"[{self._node_name}] error state callback", e) + self._system_events["robot_state_callback_state"] = { + "time": time_func(), + "value": f"failed to execute {e}", + } + + raise Exception("Error in robot state callback") + + @accumulate_time + def imagefeat_callback(self, *args): + """Main callback to process incoming images + + Args: + imagefeat_msg (wild_visual_navigation_msg/ImageFeatures): Incoming imagefeatures + info_msg (sensor_msgs/CameraInfo): Camera info message associated to the image + """ + if not self._setup_ready: + return + + if self._ros_params.mode == WVNMode.DEBUG: + assert len(args) == 4 + imagefeat_msg, info_msg, image_msg, camera_options = tuple(args) + else: + assert len(args) == 3 + imagefeat_msg, info_msg, camera_options = tuple(args) + + self._system_events["image_callback_received"] = { + "time": time_func(), + "value": "message received", + } + + if self._ros_params.verbose: + print(f"[{self._node_name}] Image callback: {camera_options['name']}... ", end="") + + try: + # Run the callback so as to match the desired rate + ts = imagefeat_msg.header.stamp.to_sec() + if abs(ts - self._last_image_ts) < 1.0 / self._ros_params.image_callback_rate: + return + self._last_image_ts = ts + + # Query transforms from TF + success, pose_base_in_world = rc.ros_tf_to_torch( + self.query_tf( + self._ros_params.fixed_frame, + self._ros_params.base_frame, + imagefeat_msg.header.stamp, + ), + device=self._ros_params.device, + ) + if not success: + self._system_events["image_callback_canceled"] = { + "time": time_func(), + "value": "canceled due to pose_base_in_world", + } + return + + success, pose_cam_in_base = rc.ros_tf_to_torch( + self.query_tf( + self._ros_params.base_frame, + imagefeat_msg.header.frame_id, + imagefeat_msg.header.stamp, + ), + device=self._ros_params.device, + ) + if not success: + self._system_events["image_callback_canceled"] = { + "time": time_func(), + "value": "canceled due to pose_cam_in_base", + } + return + + # Prepare image projector + K, H, W = rc.ros_cam_info_to_tensors(info_msg, device=self._ros_params.device) + image_projector = ImageProjector( + K=K, + h=H, + w=W, + new_h=self._ros_params.network_input_image_height, + new_w=self._ros_params.network_input_image_width, + ) + # Add image to base node + # convert image message to torch image + feature_segments = rc.ros_image_to_torch( + imagefeat_msg.feature_segments, + desired_encoding="passthrough", + device=self._ros_params.device, + ).clone() + h_small, w_small = feature_segments.shape[1:3] + + torch_image = None + # convert image message to torch image + if self._ros_params.mode == WVNMode.DEBUG: + torch_image = rc.ros_image_to_torch( + image_msg, + desired_encoding="passthrough", + device=self._ros_params.device, + ).clone() + + # 
Create mission node for the graph + mission_node = MissionNode( + timestamp=ts, + pose_base_in_world=pose_base_in_world, + pose_cam_in_base=pose_cam_in_base, + image=torch_image, + image_projector=image_projector, + camera_name=camera_options["name"], + use_for_training=camera_options["use_for_training"], + ) + ma = imagefeat_msg.features + dims = tuple(map(lambda x: x.size, ma.layout.dim)) + mission_node.features = torch.from_numpy( + np.array(ma.data, dtype=float).reshape(dims).astype(np.float32) + ).to(self._ros_params.device) + mission_node.feature_segments = feature_segments[0] + + # Add node to graph + added_new_node = self._traversability_estimator.add_mission_node(mission_node) + + if self._ros_params.mode == WVNMode.DEBUG: + # Publish current predictions + + # Publish supervision data depending on the mode + self.visualize_image_overlay() + + if added_new_node: + self._traversability_estimator.update_visualization_node() + + self.visualize_mission_graph() + + # Print callback time if required + if self._ros_params.print_image_callback_time: + rospy.loginfo(f"[{self._node_name}]\n{self._timer}") + + self._system_events["image_callback_state"] = { + "time": time_func(), + "value": "executed successfully", + } + + except Exception as e: + traceback.print_exc() + rospy.logerr(f"[{self._node_name}] error image callback", e) + self._system_events["image_callback_state"] = { + "time": time_func(), + "value": f"failed to execute {e}", + } + raise Exception("Error in image callback") + + @accumulate_time + def visualize_supervision(self): + """Publishes all the visualizations related to supervision info, + like footprints and the sliding graph + """ + # Get current time for later + now = rospy.Time.now() + + supervision_graph_msg = Path() + supervision_graph_msg.header.frame_id = self._ros_params.fixed_frame + supervision_graph_msg.header.stamp = now + + # Footprints + footprints_marker = Marker() + footprints_marker.id = 0 + footprints_marker.ns = "footprints" + footprints_marker.header.frame_id = self._ros_params.fixed_frame + footprints_marker.header.stamp = now + footprints_marker.type = Marker.TRIANGLE_LIST + footprints_marker.action = Marker.ADD + footprints_marker.scale.x = 1 + footprints_marker.scale.y = 1 + footprints_marker.scale.z = 1 + footprints_marker.color.a = 1.0 + footprints_marker.pose.orientation.w = 1.0 + footprints_marker.pose.position.x = 0.0 + footprints_marker.pose.position.y = 0.0 + footprints_marker.pose.position.z = 0.0 + + last_points = [None, None] + for node in self._traversability_estimator.get_supervision_nodes(): + # Path + pose = PoseStamped() + pose.header.stamp = now + pose.header.frame_id = self._ros_params.fixed_frame + pose.pose = rc.torch_to_ros_pose(node.pose_base_in_world) + supervision_graph_msg.poses.append(pose) + + # Color for traversability + r, g, b, _ = self._color_palette(node.traversability.item()) + c = ColorRGBA(r, g, b, 0.95) + + # Rainbow path + side_points = node.get_side_points() + + # if the last points are empty, fill and continue + if None in last_points: + for i in range(2): + last_points[i] = Point( + x=side_points[i, 0].item(), + y=side_points[i, 1].item(), + z=side_points[i, 2].item(), + ) + continue + else: + # here we want to add 2 triangles: from the last saved points (lp) + # and the current side points (sp): + # triangle 1: [lp0, lp1, sp0] + # triangle 2: [lp1, sp0, sp1] + + points_to_add = [] + # add the last points points + for lp in last_points: + points_to_add.append(lp) + # Add first from new side points + 
points_to_add.append( + Point( + x=side_points[i, 0].item(), + y=side_points[i, 1].item(), + z=side_points[i, 2].item(), + ) + ) + # Add last of last points + points_to_add.append(last_points[0]) + # Add new side points and update last points + for i in range(2): + last_points[i] = Point( + x=side_points[i, 0].item(), + y=side_points[i, 1].item(), + z=side_points[i, 2].item(), + ) + points_to_add.append(last_points[i]) + + # Add all the points and colors + for p in points_to_add: + footprints_marker.points.append(p) + footprints_marker.colors.append(c) + + # Untraversable plane + if node.is_untraversable: + untraversable_plane = node.get_untraversable_plane(grid_size=2) + N, D = untraversable_plane.shape + # the following is a 'hack' to show the triangles correctly + for n in [0, 1, 3, 2, 0, 3]: + p = Point() + p.x = untraversable_plane[n, 0] + p.y = untraversable_plane[n, 1] + p.z = untraversable_plane[n, 2] + footprints_marker.points.append(p) + footprints_marker.colors.append(c) + + # Publish + if len(footprints_marker.points) % 3 != 0: + if self._ros_params.verbose: + rospy.loginfo(f"[{self._node_name}] number of points for footprint is {len(footprints_marker.points)}") + return + self._pub_graph_footprints.publish(footprints_marker) + self._pub_debug_supervision_graph.publish(supervision_graph_msg) + + # Publish latest traversability + self._pub_instant_traversability.publish(self._supervision_generator.traversability) + self._system_events["visualize_supervision"] = { + "time": time_func(), + "value": f"executed successfully", + } + + @accumulate_time + def visualize_mission_graph(self): + """Publishes all the visualizations related to the mission graph""" + # Get current time for later + now = rospy.Time.now() + + # Publish mission graph + mission_graph_msg = Path() + mission_graph_msg.header.frame_id = self._ros_params.fixed_frame + mission_graph_msg.header.stamp = now + + for node in self._traversability_estimator.get_mission_nodes(): + pose = PoseStamped() + pose.header.stamp = now + pose.header.frame_id = self._ros_params.fixed_frame + pose.pose = rc.torch_to_ros_pose(node.pose_cam_in_world) + mission_graph_msg.poses.append(pose) + + self._pub_mission_graph.publish(mission_graph_msg) + + @accumulate_time + def visualize_image_overlay(self): + """Publishes all the debugging, slow visualizations""" + + # Get visualization node + vis_node = self._traversability_estimator.get_mission_node_for_visualization() + + # Publish reprojections of last node in graph + if vis_node is not None: + cam = vis_node.camera_name + torch_image = vis_node._image + torch_mask = vis_node._supervision_mask + torch_mask = torch.nan_to_num(torch_mask.nanmean(axis=0)) != 0 + torch_mask = torch_mask.float() + + image_out = self._visualizer.plot_detectron_classification(torch_image, torch_mask, cmap="Blues") + self._camera_handler[cam]["debug"]["image_overlay"].publish(rc.numpy_to_ros_image(image_out)) + + def pause_learning_callback(self, req): + """Start and stop the network training""" + prev_state = self._traversability_estimator.pause_learning + self._traversability_estimator.pause_learning = req.data + if not req.data and prev_state: + message = "Resume training!" + elif req.data and prev_state: + message = "Training was already paused!" + elif not req.data and not prev_state: + message = "Training was already running!" + elif req.data and not prev_state: + message = "Pause training!" 
+ message += f" Updated the network for {self._traversability_estimator.step} steps" + + return True, message + + def reset_callback(self, req): + """Resets the system""" + rospy.logwarn(f"[{self._node_name}] System reset!") + + print(f"[{self._node_name}] Storing learned checkpoint...", end="") + self._traversability_estimator.save_checkpoint(self._params.general.model_path, "last_checkpoint.pt") + print("done") + + if self._ros_params.log_time: + print(f"[{self._node_name}] Storing timer data...", end="") + self._timer.store(folder=self._params.general.model_path) + print("done") + + # Create new mission folder + create_experiment_folder(self._params) + + # Reset traversability estimator + self._traversability_estimator.reset() + + print(f"[{self._node_name}] Reset done") + return TriggerResponse(True, "Reset done!") + + @accumulate_time + def save_checkpoint_callback(self, req): + """Service call to store the learned checkpoint + + Args: + req (TriggerRequest): Trigger request service + """ + if req.checkpoint_name == "": + req.checkpoint_name = "last_checkpoint.pt" + + if req.mission_path == "": + message = f"[WARNING] Store checkpoint {req.checkpoint_name} default mission path: {self._params.general.model_path}/{req.checkpoint_name}" + req.mission_path = self._params.general.model_path + else: + message = f"Store checkpoint {req.checkpoint_name} to: {req.mission_path}/{req.checkpoint_name}" + + self._traversability_estimator.save_checkpoint(req.mission_path, req.checkpoint_name) + return SaveCheckpointResponse(success=True, message=message) + + def load_checkpoint_callback(self, req): + """Service call to load a learned checkpoint + + Args: + req (TriggerRequest): Trigger request service + """ + if req.checkpoint_path == "": + return LoadCheckpointResponse( + success=False, + message=f"Path [{req.checkpoint_path}] is empty. 
Please check and try again", + ) + checkpoint_path = req.checkpoint_path + self._traversability_estimator.load_checkpoint(checkpoint_path) + return LoadCheckpointResponse(success=True, message=f"Checkpoint [{checkpoint_path}] loaded successfully") + + @accumulate_time + def query_tf(self, parent_frame: str, child_frame: str, stamp: Optional[rospy.Time] = None): + """Helper function to query TFs + + Args: + parent_frame (str): Frame of the parent TF + child_frame (str): Frame of the child + """ + + if stamp is None: + stamp = rospy.Time(0) + + try: + res = self.tf_buffer.lookup_transform(parent_frame, child_frame, stamp, timeout=rospy.Duration(0.03)) + trans = ( + res.transform.translation.x, + res.transform.translation.y, + res.transform.translation.z, + ) + rot = np.array( + [ + res.transform.rotation.x, + res.transform.rotation.y, + res.transform.rotation.z, + res.transform.rotation.w, + ] + ) + rot /= np.linalg.norm(rot) + return (trans, tuple(rot)) + except Exception: + if self._ros_params.verbose: + # print("Error in query tf: ", e) + rospy.logwarn(f"[{self._node_name}] Couldn't get between {parent_frame} and {child_frame}") + return (None, None) + + +if __name__ == "__main__": + fn = os.path.join(WVN_ROOT_DIR, ".tmp_state_dict.pt") + if os.path.exists(fn): + os.remove(fn) + + node_name = "wvn_learning_node" + rospy.init_node(node_name) + + reload_rosparams( + enabled=rospy.get_param("~reload_default_params", True), + node_name=node_name, + camera_cfg="wide_angle_dual", + ) + wvn = WvnLearning(node_name) + rospy.spin() diff --git a/wild_visual_navigation_ros/setup.py b/wild_visual_navigation_ros/setup.py index f376a2ec..33c335b0 100644 --- a/wild_visual_navigation_ros/setup.py +++ b/wild_visual_navigation_ros/setup.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from setuptools import setup from catkin_pkg.python_setup import generate_distutils_setup diff --git a/wild_visual_navigation_ros/src/wild_visual_navigation_ros/__init__.py b/wild_visual_navigation_ros/src/wild_visual_navigation_ros/__init__.py index ca21baa5..b8dda2bc 100644 --- a/wild_visual_navigation_ros/src/wild_visual_navigation_ros/__init__.py +++ b/wild_visual_navigation_ros/src/wild_visual_navigation_ros/__init__.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# from .ros_converter import ( robot_state_to_torch, wvn_robot_state_to_torch, @@ -5,3 +10,5 @@ ros_tf_to_torch, ros_image_to_torch, ) +from .scheduler import Scheduler +from .reload_rosparams import reload_rosparams \ No newline at end of file diff --git a/wild_visual_navigation_ros/src/wild_visual_navigation_ros/ros_converter.py b/wild_visual_navigation_ros/src/wild_visual_navigation_ros/ros_converter.py index a8dff961..93ce778c 100644 --- a/wild_visual_navigation_ros/src/wild_visual_navigation_ros/ros_converter.py +++ b/wild_visual_navigation_ros/src/wild_visual_navigation_ros/ros_converter.py @@ -1,3 +1,8 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. 
+# import cv2 from geometry_msgs.msg import Pose from nav_msgs.msg import Odometry @@ -8,7 +13,7 @@ import numpy as np import torch import torchvision.transforms as transforms - +from pytictac import Timer CV_BRIDGE = CvBridge() TO_TENSOR = transforms.ToTensor() TO_PIL_IMAGE = transforms.ToPILImage() @@ -106,15 +111,18 @@ def ros_tf_to_torch(tf_pose, device="cpu"): def ros_image_to_torch(ros_img, desired_encoding="rgb8", device="cpu"): - if isinstance(ros_img, Image): + if type(ros_img).__name__ == "_sensor_msgs__Image" or isinstance(ros_img, Image): np_image = CV_BRIDGE.imgmsg_to_cv2(ros_img, desired_encoding=desired_encoding) - elif isinstance(ros_img, CompressedImage): + elif type(ros_img).__name__ == "_sensor_msgs__CompressedImage" or isinstance(ros_img, CompressedImage): np_arr = np.fromstring(ros_img.data, np.uint8) np_image = cv2.imdecode(np_arr, cv2.IMREAD_COLOR) if "bgr" in ros_img.format: np_image = cv2.cvtColor(np_image, cv2.COLOR_BGR2RGB) - + + else: + raise ValueError("Image message type is not implemented.") + return TO_TENSOR(np_image).to(device) diff --git a/wild_visual_navigation_ros/src/wild_visual_navigation_ros/scheduler.py b/wild_visual_navigation_ros/src/wild_visual_navigation_ros/scheduler.py new file mode 100644 index 00000000..8183cfc6 --- /dev/null +++ b/wild_visual_navigation_ros/src/wild_visual_navigation_ros/scheduler.py @@ -0,0 +1,106 @@ +# +# Copyright (c) 2022-2024, ETH Zurich, Matias Mattamala, Jonas Frey. +# All rights reserved. Licensed under the MIT license. +# See LICENSE file in the project root for details. +# +class Scheduler: + """ + Implements a modified weighted round-robin scheduler + https://en.wikipedia.org/wiki/Weighted_round_robin + + Given a list of processes and corresponding weights, + it will produce a process schedule that assigns a fair ordering for + the processes given their weight + + Note: This does not handle process execution but only the schedule + """ + + def __init__(self): + self._processes = {} + self._schedule = [] + self._idx = 0 + + def add_process(self, name, weight: int = 1): + # Adds a process to the schedule with some weight (int) + self._processes[name] = weight + self._make_schedule() + + def step(self): + # Move the queue to the next process + self._idx = (self._idx + 1) % len(self._schedule) + + def get(self): + # Gets the current process in the schedule + if len(self._schedule) == 0: + return None + else: + # Get the current scheduled process + return self._schedule[self._idx] + + @property + def schedule(self): + return self._schedule + + def _make_schedule(self): + # Reset schedule + self._schedule = [] + + # Prepare some lists + weights = list(self._processes.values()) + processes = list(self._processes.keys()) + + # Get the total weight + w_total = sum(weights) + + # Get the number of processes + num_processes = len(processes) + + # Prepare queues + queues = [[p] * w for p, w in zip(processes, weights)] + + # Fill schedule + for w in range(w_total): + for i in range(num_processes): + if len(queues[i]) > 0 and weights[i] > w: + self._schedule.append(queues[i].pop()) + + +def run_scheduler(): + s = Scheduler() + s.add_process("p1", 1) + s.add_process("p2", 1) + s.add_process("p3", 1) + print(f"Schedule: {s.schedule}") + + assert s.get() == "p1" + s.step() + assert s.get() == "p2" + s.step() + assert s.get() == "p3" + s.step() + assert s.get() == "p1" + s.step() + + s = Scheduler() + s.add_process("p1", 2) + s.add_process("p2", 1) + s.add_process("p3", 2) + s.add_process("p4", 1) + print(f"Schedule: 
{s.schedule}") + + assert s.get() == "p1" + s.step() + assert s.get() == "p2" + s.step() + assert s.get() == "p3" + s.step() + assert s.get() == "p4" + s.step() + assert s.get() == "p1" + s.step() + assert s.get() == "p3" + s.step() + + +if __name__ == "__main__": + run_scheduler() diff --git a/wild_visual_navigation_sim/CMakeLists.txt b/wild_visual_navigation_sim/CMakeLists.txt new file mode 100644 index 00000000..d451ee18 --- /dev/null +++ b/wild_visual_navigation_sim/CMakeLists.txt @@ -0,0 +1,27 @@ +cmake_minimum_required(VERSION 3.0.2) +project(wild_visual_navigation_sim) + +find_package(catkin REQUIRED COMPONENTS + rospy + nav_msgs + sensor_msgs + std_msgs + wild_visual_navigation_ros +) + +catkin_package( + CATKIN_DEPENDS +) + +# catkin_python_setup() +# catkin_install_python(PROGRAMS scripts/wvn_feature_extractor_node.py +# scripts/wvn_learning_node.py +# scripts/overlay_images.py +# scripts/smart_carrot.py +# scripts/rosbag_play.sh +# DESTINATION ${CATKIN_PACKAGE_BIN_DESTINATION}) + +message(${CATKIN_PACKAGE_SHARE_DESTINATION}) +install(DIRECTORY launch Media worlds + DESTINATION ${CATKIN_PACKAGE_SHARE_DESTINATION} +) \ No newline at end of file diff --git a/wild_visual_navigation_sim/Media/models/OliveTree1.dae b/wild_visual_navigation_sim/Media/models/OliveTree1.dae new file mode 100644 index 00000000..6583bc3e --- /dev/null +++ b/wild_visual_navigation_sim/Media/models/OliveTree1.dae @@ -0,0 +1,4065 @@ + + + + + Google SketchUp 7.0.8657 + + 2010-03-16T17:45:56Z + 2010-03-16T17:45:56Z + + Z_UP + + + + textures/texture0.jpg + + + textures/texture0.png + + + + + + + + + + + + + + + + material_0_1_0-image + + + + + material_0_1_0-image-surface + + + + + + 0.000000 0.000000 0.000000 1 + + + 0.000000 0.000000 0.000000 1 + + + + + + 1 1 1 1 + + + 0.050000 + + + + + + 1 + + + + + + + + + material_1_2_0-image + + + + + material_1_2_0-image-surface + + + + + + 0.000000 0.000000 0.000000 1 + + + 0.000000 0.000000 0.000000 1 + + + + + + 1 1 1 1 + + + 0.010000 + + + + + + 1 + + + + + + + + + + 60.510791 32.918764 68.437903 61.241531 44.984435 54.582557 60.592489 45.240710 55.022876 60.646860 44.968439 54.000801 + + + + + + + + + + -0.402852 -0.768615 -0.496932 0.998781 -0.041493 -0.026754 -0.422988 0.441994 0.791026 -0.402852 -0.768615 -0.496932 -0.382975 -0.490402 -0.782838 -0.402852 -0.768615 -0.496932 + + + + + + + + + + -39.720115 12.765230 -1.736267 0.259387 -0.035743 0.832450 -31.484872 15.003045 -1.503014 0.031632 -0.114239 0.687616 48.508210 8.992771 0.731048 0.777517 1.064867 -0.011546 + + + + + + + + + + + + + + +

[OliveTree1.dae, remainder omitted: COLLADA geometry data exported from SketchUp (vertex position, normal, and texture-coordinate arrays plus triangle index lists for the olive-tree meshes).]

+
+
+
+ + + + 60.873211 60.421228 64.063614 61.472295 59.281769 66.962060 60.913254 57.163725 64.027407 62.031046 61.320934 69.889570 62.199127 56.109397 70.349204 61.667607 61.484939 68.093096 61.520983 55.546301 66.934326 61.578072 54.098348 67.113727 62.060904 52.458275 69.397569 61.810195 49.331976 67.924183 + + + + + + + + + + 0.980296 0.014240 -0.197020 + + + + + + + + + + 3.693566 0.101617 3.623601 0.293049 3.495029 0.099226 3.747366 0.486401 3.429644 0.516758 3.757682 0.367750 3.395931 0.291217 3.307647 0.303066 3.207280 0.453906 3.016996 0.356594 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 3 0 3 2 0 2 1 0 1 4 0 4 2 0 2 3 0 3 3 0 3 1 0 1 5 0 5 4 0 4 6 0 6 2 0 2 4 0 4 7 0 7 6 0 6 8 0 8 7 0 7 4 0 4 7 0 7 8 0 8 9 0 9

+
+
+
+ + + + 61.358553 82.833911 68.998404 64.167684 84.955666 67.426393 61.625061 83.536788 66.561781 64.839807 84.942884 69.423759 59.577981 81.652190 69.250743 61.169071 82.047856 72.036585 63.751021 84.030028 70.445730 + + + + + + + + + + -0.525661 0.830953 0.182205 + + + + + + + + + + -4.368319 -0.629004 -4.582125 -0.732527 -4.404945 -0.789466 -4.616325 -0.600992 -4.238113 -0.612386 -4.332949 -0.428926 -4.530508 -0.533691 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 3 0 3 0 0 0 4 0 4 3 0 3 4 0 4 5 0 5 3 0 3 5 0 5 6 0 6

+
+
+
+ + + + 37.275392 24.406897 65.541776 39.658459 25.345167 65.145323 38.895732 25.079960 65.059991 40.924508 25.568713 66.597100 36.608356 23.939536 66.890703 42.452402 25.965547 67.580874 37.425239 24.021331 68.204984 47.919769 27.428559 70.841223 44.320834 26.745874 66.999864 39.482931 24.656008 68.923768 48.388894 28.267788 66.805486 39.842908 24.493564 70.703133 47.499119 28.192390 65.291111 37.865837 23.504561 72.305371 44.143206 27.216209 63.762566 38.898967 24.151160 70.683318 45.797675 26.412914 72.283430 46.516093 28.185568 63.155585 41.014071 24.574460 72.807360 45.499260 27.849561 62.935675 38.066593 23.413968 73.297710 44.487670 25.497534 74.917621 40.890753 24.515400 72.891403 42.656394 24.946153 74.196540 38.873297 23.526853 74.401462 43.661413 25.102023 75.479521 + + + + + + + + + + -0.339804 0.927893 0.153455 + + + + + + + + + + -0.978458 -0.426380 -1.134495 -0.452358 -1.085289 -0.457950 -1.211631 -0.357226 -0.930491 -0.337987 -1.307384 -0.292761 -0.978953 -0.251865 -1.650921 -0.079117 -1.430660 -0.330834 -1.110008 -0.204764 -1.695355 -0.343571 -1.127204 -0.088166 -1.642857 -0.442805 -0.993337 0.016826 -1.430352 -0.542968 -1.066010 -0.089464 -1.508197 0.015388 -1.586459 -0.582742 -1.195920 0.049720 -1.521227 -0.597152 -1.002927 0.081852 -1.414047 0.188002 -1.187626 0.055227 -1.297694 0.140750 -1.051457 0.154179 -1.358474 0.224822 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 3 0 3 0 0 0 4 0 4 3 0 3 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6 7 0 7 5 0 5 6 0 6 7 0 7 8 0 8 5 0 5 7 0 7 6 0 6 9 0 9 10 0 10 8 0 8 7 0 7 7 0 7 9 0 9 11 0 11 12 0 12 8 0 8 10 0 10 11 0 11 13 0 13 7 0 7 12 0 12 14 0 14 8 0 8 13 0 13 11 0 11 15 0 15 7 0 7 13 0 13 16 0 16 17 0 17 14 0 14 12 0 12 16 0 16 13 0 13 18 0 18 14 0 14 17 0 17 19 0 19 20 0 20 18 0 18 13 0 13 16 0 16 18 0 18 21 0 21 18 0 18 20 0 20 22 0 22 21 0 21 18 0 18 23 0 23 22 0 22 20 0 20 24 0 24 21 0 21 23 0 23 25 0 25

+
+
+
+ + + + 34.487383 35.495103 109.424487 32.307377 32.658985 106.348883 36.366254 34.801368 104.270737 + + + + + + + + + + 0.562721 -0.766969 0.308388 + + + + + + + + + + 1.555328 3.012838 1.345965 2.803482 1.622639 2.662024 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2

+
+
+
+ + + + 59.491792 72.158870 73.059818 58.534678 73.023118 69.095648 58.277355 72.310585 66.353934 58.741752 74.190666 72.355822 59.562517 70.649791 70.789391 57.854760 74.807101 68.354842 57.893556 72.663920 64.776336 57.912589 75.861856 70.557785 58.558444 70.058067 63.973078 56.737820 77.004465 65.837262 59.234414 68.672177 65.397302 57.160416 74.507949 63.836354 59.684386 68.469122 67.621509 56.649325 75.567237 62.779808 60.564456 67.855811 71.588270 55.837273 78.671840 63.622467 60.936434 65.346108 69.273260 56.121101 78.543760 65.025428 + + + + + + + + + + -0.942521 -0.291072 0.164106 + + + + + + + + + + -3.697656 0.795219 -3.765192 0.535004 -3.728329 0.355033 -3.829454 0.749008 -3.608511 0.646184 -3.881299 0.486376 -3.755805 0.251476 -3.941677 0.630981 -3.592111 0.198749 -4.029336 0.321117 -3.499255 0.292238 -3.876365 0.189774 -3.479340 0.438239 -3.947238 0.120420 -3.427801 0.698624 -4.142620 0.175734 -3.274973 0.546663 -4.130058 0.267827 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 2 0 2 4 0 4 0 0 0 1 0 1 3 0 3 5 0 5 6 0 6 4 0 4 2 0 2 5 0 5 3 0 3 7 0 7 8 0 8 4 0 4 6 0 6 6 0 6 2 0 2 9 0 9 10 0 10 4 0 4 8 0 8 6 0 6 9 0 9 11 0 11 12 0 12 4 0 4 10 0 10 11 0 11 9 0 9 13 0 13 12 0 12 14 0 14 4 0 4 13 0 13 9 0 9 15 0 15 14 0 14 12 0 12 16 0 16 15 0 15 9 0 9 17 0 17

+
+
+
+ + + + 59.603965 39.028001 67.962633 59.717700 41.613080 64.800624 58.822245 38.236661 65.714760 60.278972 40.215700 69.191151 60.842679 42.597725 68.253349 60.344229 42.225735 66.632729 61.278307 44.739086 67.104061 + + + + + + + + + + -0.927842 0.304416 0.215500 + + + + + + + + + + -2.872304 0.164487 -3.024156 -0.045185 -2.811629 0.015431 -2.953903 0.245950 -3.102546 0.183765 -3.071535 0.076302 -3.234820 0.107556 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 1 0 1 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6

+
+
+
+ + + + 56.193265 69.794871 108.164572 53.420079 59.683570 109.971806 55.548847 57.475633 102.459973 + + + + + + + + + + 0.926586 -0.196857 0.320446 0.929653 -0.185905 0.318096 0.949622 -0.114461 0.291749 + + + + + + + + + + 4.539313 2.162812 3.901431 2.286348 3.796771 1.772865 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2

+
+
+
+ + + + 70.354442 36.227341 38.017338 70.768301 33.633519 37.119594 70.606673 34.303820 36.867240 70.418748 37.056215 40.045396 71.096160 32.299267 37.676260 71.589167 29.317324 36.796735 71.768613 27.679871 35.505220 71.029378 35.933429 43.479032 72.614901 21.786432 32.632376 71.922001 28.405589 38.140891 71.028495 31.593371 35.834828 72.186992 25.515230 35.402661 71.866199 30.213628 40.827842 71.957590 24.478261 31.545978 72.822273 22.659550 36.005606 72.121665 26.967396 37.379074 72.138379 28.704114 40.582903 70.625731 32.696667 34.208256 72.488915 24.560848 36.397959 71.509122 33.810596 43.993626 71.713094 25.034234 30.358385 72.550737 24.646141 37.095676 72.491431 27.991638 42.456758 70.167774 35.050993 34.293973 72.936123 22.689009 37.065966 72.667963 30.809006 48.977730 69.959908 35.070483 32.486909 72.454499 26.055661 38.723231 72.784997 26.168898 41.850141 71.422360 24.939395 27.616083 73.444774 21.731799 39.887565 73.056009 25.171870 42.496580 70.559605 30.640198 30.004109 72.870542 24.915992 40.403426 73.434229 23.656966 43.181498 69.864620 34.326477 30.333731 71.083901 26.871565 28.017568 73.733970 23.177212 44.992583 70.146241 32.341413 29.335687 70.935188 27.278936 27.416987 73.041981 28.537178 48.293608 70.569655 29.607639 28.276342 73.745346 24.264855 47.007072 70.519509 28.645164 26.138641 74.085711 25.985219 53.049150 74.483350 24.218970 53.463847 + + + + + + + + + + 0.974417 -0.085360 -0.207907 + + + + + + + + + + 0.692621 0.705234 0.436466 0.726125 0.496647 0.680620 0.796101 0.842734 0.317351 0.821203 0.024762 0.857851 -0.145947 0.811025 0.732690 1.168272 -0.738302 0.789509 -0.044664 1.002624 0.227804 0.694641 -0.351686 0.882003 0.159489 1.158735 -0.497533 0.600726 -0.613936 1.036769 -0.189972 0.992344 0.013863 1.193872 0.311817 0.519450 -0.429472 0.999455 0.538565 1.288818 -0.459767 0.481978 -0.412751 1.054097 -0.030163 1.375191 0.535264 0.440114 -0.597984 1.123489 0.316944 1.811717 0.514662 0.289767 -0.259398 1.137117 -0.209868 1.391879 -0.502783 0.258387 -0.653356 1.392270 -0.296015 1.482010 0.065356 0.246831 -0.346181 1.318084 -0.430602 1.594341 0.417644 0.138791 -0.315290 0.220696 -0.453425 1.761918 0.217746 0.129027 -0.284270 0.156010 0.093858 1.838475 -0.053635 0.141674 -0.325671 1.888794 -0.171096 0.000000 -0.088131 2.325940 -0.249815 2.425123 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 5 0 5 4 0 4 3 0 3 6 0 6 4 0 4 5 0 5 5 0 5 3 0 3 7 0 7 8 0 8 4 0 4 6 0 6 5 0 5 7 0 7 9 0 9 8 0 8 10 0 10 4 0 4 11 0 11 8 0 8 6 0 6 9 0 9 7 0 7 12 0 12 13 0 13 10 0 10 8 0 8 8 0 8 11 0 11 14 0 14 15 0 15 11 0 11 6 0 6 12 0 12 7 0 7 16 0 16 13 0 13 17 0 17 10 0 10 14 0 14 11 0 11 18 0 18 16 0 16 7 0 7 19 0 19 20 0 20 17 0 17 13 0 13 14 0 14 18 0 18 21 0 21 16 0 16 19 0 19 22 0 22 20 0 20 23 0 23 17 0 17 14 0 14 21 0 21 24 0 24 22 0 22 19 0 19 25 0 25 20 0 20 26 0 26 23 0 23 24 0 24 21 0 21 27 0 27 22 0 22 25 0 25 28 0 28 29 0 29 26 0 26 20 0 20 24 0 24 27 0 27 30 0 30 28 0 28 25 0 25 31 0 31 32 0 32 26 0 26 29 0 29 30 0 30 27 0 27 33 0 33 31 0 31 25 0 25 34 0 34 32 0 32 35 0 35 26 0 26 36 0 36 32 0 32 29 0 29 30 0 30 33 0 33 31 0 31 34 0 34 25 0 25 37 0 37 30 0 30 31 0 31 34 0 34 35 0 35 32 0 32 38 0 38 39 0 39 32 0 32 36 0 36 37 0 37 25 0 25 40 0 40 39 0 39 41 0 41 32 0 32 37 0 37 40 0 40 42 0 42 41 0 41 39 0 39 43 0 43 42 0 42 40 0 40 44 0 44 42 0 42 44 0 44 45 0 45

+
+
+
+ + + + 57.485562 19.967711 36.977690 55.322352 20.841398 38.189103 55.855037 20.732754 36.367222 58.182130 19.611832 37.654067 58.436544 19.371182 39.484336 56.084548 20.357571 40.279951 57.387753 19.744051 40.797277 54.098274 21.156113 41.444974 58.248806 19.245761 42.468495 53.381604 21.372919 42.885591 60.837964 17.955995 44.509846 60.046967 18.467065 42.211798 59.744022 18.450618 44.367157 55.044456 20.564124 43.917086 57.344023 19.482413 44.817438 55.782650 20.103000 45.835267 58.826414 18.765272 45.681496 57.557184 19.201740 47.481737 59.162433 18.447432 48.098832 58.126094 18.887568 48.370544 + + + + + + + + + + -0.403568 -0.910929 -0.085680 + + + + + + + + + + -0.093458 -0.492105 -0.256792 -0.414166 -0.219594 -0.531382 -0.038749 -0.448589 -0.015626 -0.330834 -0.194249 -0.279645 -0.093376 -0.246362 -0.344119 -0.204691 -0.024091 -0.138839 -0.396170 -0.112005 0.178329 -0.007504 0.113168 -0.155355 0.094232 -0.016684 -0.266723 -0.045640 -0.088753 0.012286 -0.206361 0.077771 0.026535 0.067878 -0.067138 0.183701 0.057074 0.223404 -0.021787 0.240885 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 1 0 1 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6 5 0 5 6 0 6 7 0 7 7 0 7 6 0 6 8 0 8 7 0 7 8 0 8 9 0 9 8 0 8 10 0 10 9 0 9 10 0 10 8 0 8 11 0 11 9 0 9 10 0 10 12 0 12 9 0 9 12 0 12 13 0 13 14 0 14 13 0 13 12 0 12 13 0 13 14 0 14 15 0 15 16 0 16 14 0 14 12 0 12 14 0 14 16 0 16 17 0 17 17 0 17 16 0 16 18 0 18 17 0 17 18 0 18 19 0 19

+
+
+
+ + + + 73.511673 47.180244 80.277763 57.304447 14.398769 66.723581 60.405710 37.248876 78.918303 61.581866 21.394055 83.018423 69.795733 29.713924 86.243051 64.956399 18.765230 83.958529 + + + + + + + + + + -0.604890 0.796004 0.022026 -0.836686 0.296253 -0.460641 -0.669527 0.404682 0.622869 -0.669527 0.404682 0.622869 -0.567671 0.114566 0.815245 -0.292371 0.293232 0.910238 -0.059686 0.184123 0.981089 + + + + + + + + + + -3.499458 3.381980 -1.366679 1.641546 -2.356405 3.207417 -3.084065 2.549389 -1.484686 1.730840 -1.972995 2.824602 -3.984585 -0.149102 -2.843260 -0.335996 -2.936378 0.670986 -1.745831 -0.770064 -1.841858 -2.219690 -1.577752 -1.040449 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 2 3 3 1 1 4 3 4 5 0 0 6 2 2 7 4 5 8 5 6 9 2 3 10 3 4 11

+
+
+
+ + + + 68.997417 4.058052 61.905871 59.041605 3.904678 55.542292 67.918235 3.698253 53.143255 70.412183 13.107758 70.441875 62.863410 5.045309 52.522027 62.315270 8.316313 64.405253 75.046045 12.533127 61.373655 61.298099 6.411175 50.796135 67.022542 4.312297 52.426460 66.315761 10.746950 67.552095 60.462262 8.094200 65.293316 74.965621 7.706405 53.199622 80.693656 19.042091 78.730668 61.079577 8.386506 47.670493 66.777255 5.212956 51.046152 67.729358 12.371827 70.652188 63.608610 10.034650 67.842968 59.487069 8.708889 67.659495 81.511168 17.442527 61.032987 70.823522 17.063841 77.801167 79.509913 22.163597 69.104132 59.446922 9.505775 46.363932 57.770648 10.550556 73.796125 82.635554 13.530698 52.795469 66.267975 16.541285 78.560800 79.517192 22.178156 69.113006 57.103103 7.078188 57.542098 63.724876 11.533767 71.642021 60.655995 10.313759 70.901039 54.210236 8.395405 65.306990 80.430668 13.015279 45.052756 90.905552 25.224804 45.024759 68.752931 21.097330 81.624624 85.158157 33.460524 75.989836 83.801259 27.085793 69.179978 57.278412 7.605320 56.222797 56.654586 7.210020 58.847216 62.928626 20.288394 77.817259 76.551732 10.622983 47.474956 82.787404 15.245280 46.996283 87.082021 20.326616 46.699952 72.665306 21.588595 80.979756 78.670134 21.250002 82.630713 87.228945 23.992560 61.122373 56.505841 11.411423 53.466101 64.956399 18.765230 83.958529 54.808632 8.369762 63.355653 76.755187 11.498773 45.515363 72.576228 9.050305 47.810177 81.315511 14.046652 45.050391 84.134140 16.607319 47.776057 88.066054 21.915084 45.032348 86.260690 41.307795 86.225496 57.434722 11.841201 49.780422 55.313392 8.706401 61.208668 74.654196 10.280721 46.637519 84.447988 17.385321 46.220739 73.157506 27.656330 90.856710 56.385532 11.565054 53.650801 57.916040 12.220714 47.651332 55.445943 9.104964 60.211148 54.359814 9.679077 63.015224 71.833466 9.468067 46.134825 86.275143 19.515926 46.212561 69.795733 29.713924 86.243051 73.511673 47.180244 80.277763 55.795809 13.732172 52.578781 55.312776 10.372634 58.880719 68.433381 10.259974 48.429771 92.053075 45.862096 76.568683 56.257367 14.233359 50.345169 55.007256 10.231482 60.092691 67.795705 11.513154 48.138404 69.678586 11.129296 46.831794 85.808127 38.086680 82.024005 66.739495 10.399860 49.739464 + + + + + + + + + + 0.324383 -0.764031 0.557703 -0.289127 -0.943424 0.162347 -0.086231 -0.943453 -0.320096 0.222963 -0.776965 0.588739 -0.289127 -0.943424 0.162347 -0.289127 -0.943424 0.162347 -0.144832 -0.760444 -0.633047 0.222963 -0.776965 0.588739 0.260975 -0.857018 0.444310 -0.289127 -0.943424 0.162347 0.222963 -0.776965 0.588739 0.711725 -0.620492 0.329298 0.711725 -0.620492 0.329298 0.324383 -0.764031 0.557703 0.589480 -0.806575 -0.044157 0.810019 -0.567781 0.146608 0.589480 -0.806575 -0.044157 0.693800 -0.542250 0.473926 0.082675 -0.621088 0.779368 0.222963 -0.776965 0.588739 0.913973 -0.294110 -0.279557 -0.551940 -0.628068 -0.548537 0.001127 -0.722852 0.691002 0.726776 -0.670086 0.150934 -0.101604 -0.661031 0.743448 0.082675 -0.621088 0.779368 0.857670 -0.493791 0.143436 0.810019 -0.567781 0.146608 -0.863387 -0.363557 -0.349840 -0.289127 -0.943424 0.162347 0.001127 -0.722852 0.691002 -0.645833 -0.762335 0.041779 -0.289127 -0.943424 0.162347 0.589480 -0.806575 -0.044157 0.559229 -0.776272 -0.290971 0.834304 -0.543034 0.095135 0.200288 -0.495901 0.844966 -0.101604 -0.661031 0.743448 0.847373 -0.413155 0.333558 0.810019 -0.567781 0.146608 -0.550402 0.556937 -0.621995 0.759039 0.609835 -0.227949 0.674930 -0.523514 0.520003 0.621115 -0.547896 0.560380 0.857670 
-0.493791 0.143436 0.674930 -0.523514 0.520003 -0.606573 -0.443237 0.660008 0.301529 -0.835103 -0.460090 0.559229 -0.776272 -0.290971 0.589480 -0.806575 -0.044157 0.739070 -0.634454 -0.226370 -0.024183 -0.317875 0.947824 0.357018 -0.295414 0.886154 0.847373 -0.413155 0.333558 0.621115 -0.547896 0.560380 0.593483 -0.524319 0.610629 -0.749957 -0.651262 0.115856 0.366397 -0.794050 -0.485013 0.451880 -0.125990 0.883137 -0.024183 -0.317875 0.947824 0.357018 -0.295414 0.886154 0.451880 -0.125990 0.883137 0.357018 -0.295414 0.886154 -0.046177 -0.651909 -0.756890 -0.116690 -0.215704 0.969461 0.451880 -0.125990 0.883137 -0.117621 -0.355166 0.927374 0.451880 -0.125990 0.883137 -0.005634 0.593529 0.804793 -0.116690 -0.215704 0.969461 -0.483908 -0.382256 -0.787219 0.562624 -0.363796 0.742365 0.675412 0.239844 0.697347 0.810020 -0.462838 0.360066 + + + + + + + + + + 1.527478 1.303994 0.830254 0.895200 1.452184 0.741087 0.482068 2.868077 0.225741 1.445017 0.779754 2.052803 0.683353 0.150491 0.931392 -0.086609 1.296882 -0.037840 1.972089 2.026078 1.331730 1.604843 1.022334 0.986386 2.075662 1.316896 1.495518 0.696606 2.224683 0.657931 0.805779 -0.222096 1.227199 -0.094111 1.649408 1.824429 1.203025 1.666812 2.230409 0.832593 1.501146 0.869113 1.948762 0.271703 2.841004 2.507566 2.020977 1.955881 2.311093 1.352319 0.764853 -0.467467 1.198517 -0.202469 1.777292 2.040753 1.453463 1.844726 1.150220 1.831923 2.166043 0.418858 2.948616 0.987779 2.380068 1.012521 2.599765 2.799368 1.895441 2.729604 1.825680 2.177248 2.584543 2.081921 2.397561 2.717347 1.849592 1.571650 0.637837 -0.570036 1.072781 2.260137 2.840543 0.389505 1.643832 2.732802 1.880632 2.125759 1.963863 2.676005 2.676972 2.062649 2.497506 2.700286 2.675841 2.062060 -0.626512 0.602840 -0.719519 -0.150619 -0.368935 0.468044 1.824572 2.578731 1.121743 2.211863 2.015162 1.953594 1.491845 2.109824 1.261300 2.058118 0.463755 2.413955 0.253642 1.828477 0.652954 1.155026 2.175341 0.751151 2.704798 0.223570 2.849957 0.724978 3.338023 -0.515361 2.565150 0.582365 2.350444 0.017497 2.095418 2.287157 2.043235 2.700653 1.780300 2.369309 3.553941 2.518572 2.497506 2.700286 2.676972 2.062649 -3.046598 1.333943 -2.912767 0.705612 -3.045519 1.333253 3.485296 1.338239 3.028967 1.333346 3.027865 1.332698 -0.655778 0.513913 -0.647216 0.690811 -0.974084 2.729088 -1.307625 2.601667 -0.823709 1.912565 -0.790193 1.126228 2.358757 0.114221 2.671236 -0.055994 2.184870 0.516508 2.578641 0.360650 2.804700 0.487828 2.706640 0.867308 3.270732 0.468437 3.705535 0.358818 2.326106 2.615286 2.055010 2.685089 2.104545 2.271226 3.273563 0.472139 2.348281 0.995722 2.222878 0.688233 3.883130 1.835775 3.028967 1.333346 3.485296 1.338239 3.482281 0.612497 3.028265 1.227443 2.873240 0.605593 3.485693 1.233263 -0.922499 0.328099 -0.706832 2.495751 -0.355271 2.543699 -0.533136 2.891772 -0.772270 0.994699 2.392996 -0.023485 2.059413 0.137778 2.673833 0.360495 2.938543 0.538854 3.400060 0.359315 3.995537 2.752753 2.253144 1.965046 2.512010 1.854639 1.028766 3.845747 0.646357 3.630085 0.681485 3.214872 4.169970 2.464622 2.668494 2.228135 3.632814 1.791255 -0.924994 0.079668 -0.780282 0.849983 1.635155 0.667211 2.104625 0.283627 2.203075 0.671265 2.225593 0.055372 2.994214 0.437079 3.019191 3.130669 2.471447 2.532378 4.344096 2.830541 -2.561968 4.306729 -1.469179 2.912799 -1.338978 3.351502 -0.935688 0.340549 -0.936411 -0.063843 -0.802409 0.782746 -0.869000 0.971753 2.074021 0.163126 3.190827 0.436544 -2.164553 3.597142 -1.590286 3.232698 -1.973657 3.961211 -3.791839 -0.746237 -3.111871 
-1.591858 -2.587213 -0.087793 -1.091628 0.268290 -0.887869 0.693069 0.325550 -0.789460 -0.012768 -1.239155 0.178398 -1.458106 3.015783 2.082966 2.187675 1.880264 2.469261 1.673181 -2.075268 0.098948 -2.415094 -0.645557 -1.121580 -0.359600 -1.111437 0.117734 -0.887062 0.774761 -0.109342 -1.266953 -0.006484 -1.391611 4.769830 1.789229 4.291533 2.442222 4.079894 2.158118 -0.094008 -1.114202 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 0 5 1 5 6 4 6 7 2 2 8 3 7 9 5 8 10 1 9 11 3 10 12 0 0 13 6 11 14 1 5 6 7 6 15 4 6 7 4 6 7 8 6 16 2 2 8 5 8 10 3 7 9 9 8 17 10 8 18 1 9 11 5 8 10 6 12 19 0 13 20 11 14 21 12 15 22 3 10 23 6 12 24 1 5 6 13 6 25 7 6 15 8 6 16 4 6 7 14 6 26 9 8 17 3 7 9 15 8 27 16 8 28 5 8 10 9 8 17 17 8 29 1 9 11 10 8 18 11 16 30 18 17 31 6 12 32 12 15 33 19 18 34 3 19 35 20 20 36 12 15 37 6 12 38 13 6 25 1 5 6 21 21 39 15 8 27 3 7 9 22 22 40 1 9 11 17 8 29 22 22 40 18 17 31 11 16 30 23 23 41 24 24 42 3 19 43 19 25 44 25 26 45 12 27 46 20 20 47 26 28 48 21 21 49 1 29 50 24 24 51 22 30 52 3 7 53 15 8 27 22 22 40 27 8 54 22 22 40 17 8 29 28 8 55 22 22 56 29 31 57 1 32 58 11 33 59 30 34 60 23 23 61 31 35 62 18 17 63 23 23 64 19 18 65 32 36 66 24 37 67 33 38 68 12 39 69 25 26 70 25 40 71 18 41 72 20 42 73 34 43 74 25 44 75 20 45 76 21 21 49 26 28 48 35 28 77 1 29 50 36 28 78 26 28 48 24 24 79 37 46 80 22 30 81 22 22 40 28 8 55 27 8 54 36 28 78 1 29 50 29 31 82 38 47 83 30 48 84 11 49 85 30 34 86 39 50 87 23 23 88 40 50 89 31 35 90 23 23 88 41 51 91 32 36 92 19 18 93 33 38 94 42 52 95 12 39 96 33 53 97 25 44 98 34 54 99 43 55 100 20 45 101 18 41 102 20 45 101 43 55 100 34 43 103 21 21 49 35 28 77 44 28 104 37 46 105 24 24 106 45 56 107 36 28 78 29 31 82 46 28 108 30 48 84 38 47 83 47 47 109 38 47 83 11 49 85 48 57 110 39 50 87 30 34 86 49 50 111 23 23 88 39 50 87 50 50 112 31 35 90 40 50 89 51 50 113 40 50 89 23 23 88 50 50 112 52 58 114 32 36 115 41 59 116 42 60 117 41 59 118 19 18 119 52 61 120 42 62 121 33 38 122 21 21 49 44 28 104 53 28 123 54 28 124 36 28 78 46 28 108 2 63 125 48 57 126 11 49 127 48 57 110 55 47 128 38 47 83 56 50 129 40 50 89 50 50 112 57 64 130 32 36 131 52 65 132 52 58 133 41 59 134 42 62 135 53 28 123 44 28 104 58 28 136 21 21 49 53 28 123 59 28 137 60 28 138 36 28 78 54 28 124 61 28 139 54 28 124 46 28 108 48 57 126 2 63 125 62 63 140 40 50 89 56 50 129 63 50 141 64 66 142 32 36 143 57 64 144 52 67 145 65 68 146 57 69 147 66 28 148 53 28 123 58 28 136 36 28 78 60 28 138 67 28 149 2 63 150 68 70 151 62 63 152 64 66 153 45 71 154 32 36 155 52 58 156 69 72 157 65 68 158 53 28 123 66 28 148 70 28 159 67 28 149 60 28 138 71 28 160 2 63 150 72 70 161 68 70 151 62 63 152 68 70 151 73 70 162 69 72 163 52 61 164 74 73 165 72 70 161 2 63 150 75 70 166

+
+
+
+ + + + 95.503754 39.312183 72.039191 90.780662 42.982531 78.088812 89.350779 31.663642 70.270710 97.301735 50.443769 73.460163 85.158157 33.460524 75.989836 91.809721 31.736854 61.635721 99.026726 51.320875 64.518906 83.801259 27.085793 69.179978 94.246764 37.248306 66.625847 95.231658 39.921016 67.018926 93.912031 37.441669 62.475591 96.824694 44.980673 64.969109 92.986433 32.330496 51.697826 95.132264 40.111974 65.299706 97.289816 46.607607 63.825013 91.979212 29.285809 50.048251 93.891126 36.839295 60.282538 98.645246 49.373463 60.714049 91.546563 26.923983 45.119633 94.042965 34.742592 50.298535 99.048084 46.907893 46.626345 90.905552 25.224804 45.024759 92.308018 29.458324 47.297885 94.647524 37.165101 53.671012 97.311739 46.244255 62.140767 99.013251 48.288347 52.518057 87.228945 23.992560 61.122373 94.881596 39.147397 59.158226 96.137523 43.272333 62.530187 99.883790 49.459892 48.098236 98.948214 48.969024 55.924038 95.621461 39.388629 52.381113 95.889305 42.067335 60.303228 99.603341 49.312749 50.444112 100.342189 52.547954 55.665130 97.165105 41.817514 45.951220 100.198280 51.236144 51.918867 99.320429 50.775562 59.261819 96.109434 39.597526 48.110678 100.251263 52.893274 57.999336 95.378865 38.416530 51.027813 + + + + + + + + + + 0.801846 -0.394010 0.449220 0.522044 -0.360002 0.773220 0.593220 -0.599162 0.537676 0.820324 -0.183873 0.541535 0.522044 -0.360002 0.773220 0.541003 -0.437149 0.718482 0.832036 -0.535717 0.143955 0.953649 -0.291317 0.075422 0.601066 -0.767876 0.221556 0.832036 -0.535717 0.143955 0.935537 -0.331378 -0.122306 0.801846 -0.394010 0.449220 0.953649 -0.291317 0.075422 0.938369 -0.345633 -0.001555 0.927318 -0.354775 0.119231 0.886764 -0.432459 0.163181 0.837881 -0.505728 0.205412 + + + + + + + + + + 4.683062 1.434497 4.602871 2.018963 4.006029 1.263641 4.820583 -0.400103 4.193260 0.033347 4.036001 -0.533170 4.640144 1.587065 3.873799 1.406734 3.960480 0.915381 4.666745 1.749991 3.979611 1.633222 4.096436 1.063072 4.666168 0.813439 4.583891 1.398433 3.794209 1.305464 3.995775 1.828709 3.492004 1.757397 4.134572 1.264147 4.015669 1.792582 4.181345 2.141865 3.594752 1.470605 4.215182 1.817944 5.056671 1.656636 4.020608 1.524796 4.586528 1.685684 3.644886 1.025998 3.713179 0.384959 4.070744 1.080173 4.225466 1.707015 4.704832 1.611864 3.488770 0.278554 4.030811 0.938712 4.754971 1.167217 4.969853 0.966546 5.106811 1.211977 3.323418 -0.039365 3.897435 0.294698 4.818627 0.057825 3.196205 -0.045485 3.508286 0.101142 4.071059 0.512238 4.731747 1.058576 4.908071 0.437867 3.934177 0.983914 3.303923 0.950442 3.510908 -0.099179 4.206610 0.866188 4.507919 1.083695 5.006509 0.152769 4.950976 0.657568 4.240911 0.429034 4.422869 0.940046 4.989862 0.304088 5.220019 0.640868 4.438458 0.014276 5.130592 0.399217 5.078485 0.872871 4.266792 0.153571 5.240335 0.791435 4.171240 0.341740 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 0 5 1 1 6 4 5 7 2 2 8 0 0 9 2 2 10 5 6 11 6 7 12 3 3 13 0 0 14 2 2 15 7 8 16 5 9 17 8 10 18 0 11 19 5 9 20 0 11 19 9 10 21 6 12 22 0 11 19 8 10 18 9 10 21 10 13 23 8 10 18 5 9 20 6 12 22 9 10 21 11 10 24 5 9 25 12 14 26 10 13 27 11 10 24 9 10 21 13 10 28 6 12 22 11 10 24 14 13 29 5 9 25 15 14 30 12 14 26 10 13 27 12 14 26 16 14 31 14 13 32 17 14 33 6 12 34 5 9 25 18 14 35 15 14 30 16 14 31 12 14 26 19 14 36 14 13 32 20 14 37 17 14 33 18 14 35 5 9 25 21 15 38 22 14 39 15 14 30 18 14 35 23 14 40 16 14 31 19 14 36 24 14 41 20 14 37 14 13 32 17 14 33 20 14 37 25 14 42 5 9 43 26 16 44 21 15 45 16 14 31 23 14 40 27 14 46 28 14 47 20 14 37 24 14 41 29 14 48 25 14 42 20 14 37 17 14 33 25 14 42 30 14 49 27 14 46 23 14 40 31 14 50 32 14 51 20 14 37 28 14 47 25 14 42 29 14 48 33 14 52 34 14 53 17 14 33 30 14 49 27 14 46 31 14 50 32 14 51 32 14 51 35 14 54 20 14 37 36 14 55 25 14 42 33 14 52 17 14 33 34 14 53 37 14 56 32 14 51 31 14 50 38 14 57 32 14 51 38 14 57 35 14 54 37 14 56 34 14 53 39 14 58 38 14 57 31 14 50 40 14 59

+
+
+
+ + + + 90.374207 34.364589 37.240230 90.541956 33.257229 40.165342 89.478860 31.675485 37.203691 91.582337 34.774216 43.119785 90.235415 30.442395 43.583647 91.330323 35.012596 41.306783 89.517515 30.172770 40.137353 89.151832 28.966273 40.318404 89.081366 27.480532 42.623257 87.984408 24.982131 41.136316 + + + + + + + + + + 0.924702 -0.305590 -0.227026 + + + + + + + + + + 3.693566 0.101617 3.623601 0.293049 3.495029 0.099226 3.747366 0.486401 3.429644 0.516758 3.757682 0.367750 3.395931 0.291217 3.307647 0.303066 3.207280 0.453906 3.016996 0.356594 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 3 0 3 2 0 2 1 0 1 4 0 4 2 0 2 3 0 3 3 0 3 1 0 1 5 0 5 4 0 4 6 0 6 2 0 2 4 0 4 7 0 7 6 0 6 8 0 8 7 0 7 4 0 4 7 0 7 8 0 8 9 0 9

+
+
+
+ + + + 97.159908 52.649689 42.220420 100.073654 53.593295 40.633947 97.579272 53.151515 39.761381 100.622502 53.391259 42.649688 95.359519 52.185699 42.475081 96.780157 52.057541 45.286552 99.467383 52.951146 43.681061 + + + + + + + + + + -0.223215 0.951845 0.210155 + + + + + + + + + + -4.368319 -0.629004 -4.582125 -0.732527 -4.404945 -0.789466 -4.616325 -0.600992 -4.238113 -0.612386 -4.332949 -0.428926 -4.530508 -0.533691 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 3 0 3 0 0 0 4 0 4 3 0 3 4 0 4 5 0 5 3 0 3 5 0 5 6 0 6

+
+
+
+ + + + 60.713922 11.484987 38.731991 62.940193 11.577170 38.331891 62.237652 11.576516 38.245774 64.044597 11.400152 39.797025 60.032434 11.290892 40.093329 65.413621 11.290963 40.789849 60.727226 11.125348 41.419701 70.324742 10.935582 44.080194 67.171849 11.399967 40.203494 62.599524 11.060695 42.145098 70.949514 11.491752 40.007328 62.849138 10.824585 43.940833 70.196626 11.683326 38.479020 60.942142 10.575006 45.557812 67.159865 11.837203 36.936412 61.975638 10.812113 43.920836 68.290944 10.705428 45.535669 69.386627 11.957843 36.323847 63.834895 10.557346 46.064419 68.455033 11.971401 36.101914 61.081350 10.443330 46.559279 66.953262 10.326279 48.194094 63.716697 10.543940 46.149235 65.290816 10.394883 47.466379 61.776634 10.306243 47.673187 66.161367 10.236617 48.761164 + + + + + + + + + + -0.017067 0.984011 0.177288 + + + + + + + + + + -0.978458 -0.426380 -1.134495 -0.452358 -1.085289 -0.457950 -1.211631 -0.357226 -0.930491 -0.337987 -1.307384 -0.292761 -0.978953 -0.251865 -1.650921 -0.079117 -1.430660 -0.330834 -1.110008 -0.204764 -1.695355 -0.343571 -1.127204 -0.088166 -1.642857 -0.442805 -0.993337 0.016826 -1.430352 -0.542968 -1.066010 -0.089464 -1.508197 0.015388 -1.586459 -0.582742 -1.195920 0.049720 -1.521227 -0.597152 -1.002927 0.081852 -1.414047 0.188002 -1.187626 0.055227 -1.297694 0.140750 -1.051457 0.154179 -1.358474 0.224822 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 3 0 3 0 0 0 4 0 4 3 0 3 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6 7 0 7 5 0 5 6 0 6 7 0 7 8 0 8 5 0 5 7 0 7 6 0 6 9 0 9 10 0 10 8 0 8 7 0 7 7 0 7 9 0 9 11 0 11 12 0 12 8 0 8 10 0 10 11 0 11 13 0 13 7 0 7 12 0 12 14 0 14 8 0 8 13 0 13 11 0 11 15 0 15 7 0 7 13 0 13 16 0 16 17 0 17 14 0 14 12 0 12 16 0 16 13 0 13 18 0 18 14 0 14 17 0 17 19 0 19 20 0 20 18 0 18 13 0 13 16 0 16 18 0 18 21 0 21 18 0 18 20 0 20 22 0 22 21 0 21 18 0 18 23 0 23 22 0 22 20 0 20 24 0 24 21 0 21 23 0 23 25 0 25

+
+
+
+ + + + 61.581866 21.394055 83.018423 58.981700 19.683961 79.914524 62.928626 20.288394 77.817259 + + + + + + + + + + 0.275967 -0.894396 0.351992 + + + + + + + + + + 1.555328 3.012838 1.345965 2.803482 1.622639 2.662024 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2

+
+
+
+ + + + 92.583442 44.406675 46.319200 92.042964 45.389834 42.318559 91.628398 44.877454 39.551621 92.545887 46.290560 45.608726 92.211550 43.146046 44.027885 91.992432 47.050033 41.570937 91.413599 45.277266 37.959510 92.340533 47.900571 43.794147 91.217575 42.945766 37.148861 91.700463 49.174568 39.030196 91.378303 41.613929 38.586188 91.336429 47.001989 37.010879 91.690321 41.318792 40.830857 91.218164 48.018373 35.944613 92.238975 40.563859 44.834113 91.435342 50.801785 36.795025 91.829576 38.394864 42.497805 91.632154 50.615622 38.210893 + + + + + + + + + + -0.981311 0.033544 0.189483 + + + + + + + + + + -3.697656 0.795219 -3.765192 0.535004 -3.728329 0.355033 -3.829454 0.749008 -3.608511 0.646184 -3.881299 0.486376 -3.755805 0.251476 -3.941677 0.630981 -3.592111 0.198749 -4.029336 0.321117 -3.499255 0.292238 -3.876365 0.189774 -3.479340 0.438239 -3.947238 0.120420 -3.427801 0.698624 -4.142620 0.175734 -3.274973 0.546663 -4.130058 0.267827 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 2 0 2 4 0 4 0 0 0 1 0 1 3 0 3 5 0 5 6 0 6 4 0 4 2 0 2 5 0 5 3 0 3 7 0 7 8 0 8 4 0 4 6 0 6 6 0 6 2 0 2 9 0 9 10 0 10 4 0 4 8 0 8 6 0 6 9 0 9 11 0 11 12 0 12 4 0 4 10 0 10 11 0 11 9 0 9 13 0 13 12 0 12 14 0 14 4 0 4 13 0 13 9 0 9 15 0 15 14 0 14 12 0 12 16 0 16 15 0 15 9 0 9 17 0 17

+
+
+
+ + + + 83.234635 17.140872 41.175120 84.064774 19.233418 37.984021 82.366554 16.713144 38.906567 84.127945 17.924820 42.414941 85.270101 19.722228 41.468511 84.754368 19.558488 39.832981 86.238395 21.358307 40.308650 + + + + + + + + + + -0.770789 0.586840 0.247997 + + + + + + + + + + -2.872304 0.164487 -3.024156 -0.045185 -2.811629 0.015431 -2.953903 0.245950 -3.102546 0.183765 -3.071535 0.076302 -3.234820 0.107556 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 1 0 1 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6

+
+
+
+ + + + 89.198379 43.403402 81.746917 84.037469 35.882082 83.570777 85.158157 33.460524 75.989836 + + + + + + + + + + 0.796976 -0.481032 0.365292 0.803550 -0.471968 0.362704 0.847373 -0.413155 0.333558 + + + + + + + + + + 4.539313 2.162812 3.901431 2.286348 3.796771 1.772865 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2

+
+
+
+ + + + 26.277122 65.352085 40.614193 25.425454 68.248588 39.724633 25.707879 67.508074 39.474579 26.325192 64.397703 42.623762 24.856285 69.721701 40.276224 23.857547 73.049093 39.404717 23.413176 74.889606 38.124975 25.464614 65.588323 46.026097 21.585311 81.484540 35.278321 23.344580 74.039783 40.736620 24.830054 70.536372 38.451579 22.619205 77.296156 38.023352 23.672970 71.986685 43.399076 22.728881 78.511955 34.201827 21.476757 80.458766 38.620800 22.906339 75.650168 39.981748 23.141613 73.667761 43.156370 25.450798 69.337531 36.839835 22.135234 78.339992 39.009576 24.606806 67.938222 46.536000 23.089038 77.913918 33.025060 22.077243 78.233700 39.700933 22.634777 74.428323 45.013143 26.317667 66.720526 36.924771 21.351275 80.408508 39.671493 22.846101 71.190507 51.474668 26.557501 66.728748 35.134180 22.393373 76.640859 41.313650 22.033169 76.463363 44.412055 23.406604 78.064615 30.307757 20.631207 81.425296 42.467371 21.578193 77.560346 45.052601 25.225032 71.691710 32.674013 21.752165 77.879222 42.978529 20.925155 79.232037 45.731276 26.557181 67.590911 33.000631 24.075418 75.911393 30.705582 20.513176 79.735090 47.525851 25.945420 69.812759 32.011685 24.304616 75.468744 30.110476 22.087008 73.725752 50.796783 25.062364 72.867417 30.961997 20.659484 78.493448 49.521975 24.978587 73.972038 28.843784 20.523381 76.482288 55.508973 19.811396 78.437685 55.919890 + + + + + + + + + + -0.982822 -0.039702 -0.180233 + + + + + + + + + + 0.692621 0.705234 0.436466 0.726125 0.496647 0.680620 0.796101 0.842734 0.317351 0.821203 0.024762 0.857851 -0.145947 0.811025 0.732690 1.168272 -0.738302 0.789509 -0.044664 1.002624 0.227804 0.694641 -0.351686 0.882003 0.159489 1.158735 -0.497533 0.600726 -0.613936 1.036769 -0.189972 0.992344 0.013863 1.193872 0.311817 0.519450 -0.429472 0.999455 0.538565 1.288818 -0.459767 0.481978 -0.412751 1.054097 -0.030163 1.375191 0.535264 0.440114 -0.597984 1.123489 0.316944 1.811717 0.514662 0.289767 -0.259398 1.137117 -0.209868 1.391879 -0.502783 0.258387 -0.653356 1.392270 -0.296015 1.482010 0.065356 0.246831 -0.346181 1.318084 -0.430602 1.594341 0.417644 0.138791 -0.315290 0.220696 -0.453425 1.761918 0.217746 0.129027 -0.284270 0.156010 0.093858 1.838475 -0.053635 0.141674 -0.325671 1.888794 -0.171096 0.000000 -0.088131 2.325940 -0.249815 2.425123 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 5 0 5 4 0 4 3 0 3 6 0 6 4 0 4 5 0 5 5 0 5 3 0 3 7 0 7 8 0 8 4 0 4 6 0 6 5 0 5 7 0 7 9 0 9 8 0 8 10 0 10 4 0 4 11 0 11 8 0 8 6 0 6 9 0 9 7 0 7 12 0 12 13 0 13 10 0 10 8 0 8 8 0 8 11 0 11 14 0 14 15 0 15 11 0 11 6 0 6 12 0 12 7 0 7 16 0 16 13 0 13 17 0 17 10 0 10 14 0 14 11 0 11 18 0 18 16 0 16 7 0 7 19 0 19 20 0 20 17 0 17 13 0 13 14 0 14 18 0 18 21 0 21 16 0 16 19 0 19 22 0 22 20 0 20 23 0 23 17 0 17 14 0 14 21 0 21 24 0 24 22 0 22 19 0 19 25 0 25 20 0 20 26 0 26 23 0 23 24 0 24 21 0 21 27 0 27 22 0 22 25 0 25 28 0 28 29 0 29 26 0 26 20 0 20 24 0 24 27 0 27 30 0 30 28 0 28 25 0 25 31 0 31 32 0 32 26 0 26 29 0 29 30 0 30 27 0 27 33 0 33 31 0 31 25 0 25 34 0 34 32 0 32 35 0 35 26 0 26 36 0 36 32 0 32 29 0 29 30 0 30 33 0 33 31 0 31 34 0 34 25 0 25 37 0 37 30 0 30 31 0 31 34 0 34 35 0 35 32 0 32 38 0 38 39 0 39 32 0 32 36 0 36 37 0 37 25 0 25 40 0 40 39 0 39 41 0 41 32 0 32 37 0 37 40 0 40 42 0 42 41 0 41 39 0 39 43 0 43 42 0 42 40 0 40 44 0 44 42 0 42 44 0 44 45 0 45

+
+
+
+ + + + 38.567305 85.773557 39.584022 41.161435 85.094288 40.784392 40.538234 85.140141 38.979119 37.721061 86.077274 40.254234 37.395773 86.314372 42.067817 40.221635 85.534261 42.856179 38.646060 86.042866 43.368790 42.603043 84.914751 44.010582 37.591439 86.484852 45.024773 43.451839 84.772530 45.438066 34.450771 87.576099 47.047514 35.427396 87.109284 44.770416 35.770363 87.172399 46.906126 41.437649 85.451092 46.460158 38.657600 86.347552 47.352302 40.528536 85.868697 48.360853 36.862568 86.948051 48.208484 38.373482 86.636320 49.992313 36.432942 87.261200 50.603783 37.678884 86.911182 50.873017 + + + + + + + + + + 0.284491 0.955824 -0.073932 + + + + + + + + + + -0.093458 -0.492105 -0.256792 -0.414166 -0.219594 -0.531382 -0.038749 -0.448589 -0.015626 -0.330834 -0.194249 -0.279645 -0.093376 -0.246362 -0.344119 -0.204691 -0.024091 -0.138839 -0.396170 -0.112005 0.178329 -0.007504 0.113168 -0.155355 0.094232 -0.016684 -0.266723 -0.045640 -0.088753 0.012286 -0.206361 0.077771 0.026535 0.067878 -0.067138 0.183701 0.057074 0.223404 -0.021787 0.240885 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 1 0 1 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6 5 0 5 6 0 6 7 0 7 7 0 7 6 0 6 8 0 8 7 0 7 8 0 8 9 0 9 8 0 8 10 0 10 9 0 9 10 0 10 8 0 8 11 0 11 9 0 9 10 0 10 12 0 12 9 0 9 12 0 12 13 0 13 14 0 14 13 0 13 12 0 12 13 0 13 14 0 14 15 0 15 16 0 16 14 0 14 12 0 12 14 0 14 16 0 16 17 0 17 17 0 17 16 0 16 18 0 18 17 0 17 18 0 18 19 0 19

+
+
+
+ + + + 24.281658 52.402784 82.489366 37.958260 92.148999 69.058746 37.768853 65.644387 81.142300 34.106156 83.547571 85.205042 25.960246 72.859579 88.400274 29.874021 86.050412 86.136578 + + + + + + + + + + 0.701419 -0.712496 0.018989 0.892266 -0.192562 -0.408389 0.754191 -0.333115 0.565890 0.754191 -0.333115 0.565890 0.634312 -0.045361 0.771746 0.368949 -0.285821 0.884411 0.095269 -0.201719 0.974799 + + + + + + + + + + -3.499458 3.381980 -1.366679 1.641546 -2.356405 3.207417 -3.084065 2.549389 -1.484686 1.730840 -1.972995 2.824602 -3.984585 -0.149102 -2.843260 -0.335996 -2.936378 0.670986 -1.745831 -0.770064 -1.841858 -2.219690 -1.577752 -1.040449 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 2 3 3 1 1 4 3 4 5 0 0 6 2 2 7 4 5 8 5 6 9 2 3 10 3 4 11

+
+
+
+ + + + 23.113276 102.225680 64.284955 34.441024 103.858484 57.979387 24.290917 102.793909 55.602220 22.825617 91.701306 72.743144 30.250978 101.998425 54.986655 31.354903 98.349560 66.761553 17.458594 91.677827 63.757591 32.235545 100.670486 53.276497 25.401982 102.225030 54.891960 27.150056 94.992652 69.879707 33.434914 98.874140 67.641519 16.843447 97.192349 55.658073 11.973172 83.430180 80.956375 32.773945 98.450493 50.179349 25.813517 101.234146 53.524234 25.776424 92.933188 72.951540 30.132056 96.201157 70.167928 34.636706 98.316168 69.986128 10.806918 85.134059 63.420028 22.936003 87.130902 80.035348 13.779828 80.044830 71.417595 34.799173 97.413551 48.884698 36.863224 96.467916 76.066816 8.952196 89.429115 55.257604 28.053069 88.393770 80.788056 13.773661 80.027167 71.426388 37.115765 100.524375 59.960963 30.219040 94.475049 73.932349 33.539083 96.315343 73.198122 40.606697 99.446308 67.655069 11.390416 90.339611 47.585475 1.236423 74.886076 47.557733 25.887269 82.835710 83.823949 8.994844 66.338534 78.240529 9.608259 73.804806 71.492750 36.993096 99.897741 58.653689 37.646406 100.439761 61.254183 32.408856 84.610867 80.051293 15.462297 93.635006 49.985594 9.030165 87.452153 49.511285 4.878171 81.030221 49.217655 21.498875 81.702703 83.184960 14.603436 81.209360 84.820867 5.247517 76.829315 63.508599 38.431245 95.671698 55.922123 29.874021 86.050412 86.136578 39.920736 99.387912 65.721520 15.358598 92.606760 48.043865 19.764298 96.010132 50.317759 10.532679 89.034207 47.583131 7.694267 85.702132 50.283950 3.988935 79.075171 47.565253 8.887061 57.230729 88.382879 37.435204 95.045699 52.270043 39.394578 98.930206 63.594108 17.575477 94.303084 49.155792 7.450394 84.769203 48.742811 21.826309 74.713053 92.971874 38.590903 95.514168 56.105139 36.942050 94.542547 50.160362 39.301829 98.456409 62.605682 40.624153 97.960942 65.384195 20.672268 95.642630 48.657680 5.679340 82.072616 48.734707 25.960246 72.859579 88.400274 24.281658 52.402784 82.489366 39.580578 93.129887 55.042892 39.639287 97.030693 61.287381 24.664532 95.237725 50.931704 2.950344 51.190312 78.814099 39.127770 92.490913 52.829642 39.966926 97.236356 62.488304 25.575038 93.902409 50.642993 23.372231 94.064297 49.348295 8.931303 60.969263 84.219689 26.616147 95.326303 52.229459 + + + + + + + + + + -0.436894 0.746932 0.501214 0.167141 0.975881 0.140430 -0.035137 0.959459 -0.279649 -0.335413 0.777647 0.531754 0.167141 0.975881 0.140430 0.049378 0.815824 -0.576188 -0.335413 0.777647 0.531754 -0.377737 0.838314 0.393122 0.167141 0.975881 0.140430 -0.335413 0.777647 0.531754 -0.796144 0.532222 0.287915 -0.796144 0.532222 0.287915 -0.436894 0.746932 0.501214 -0.687610 0.725081 -0.038076 -0.878183 0.461228 0.126737 -0.687610 0.725081 -0.038076 -0.780065 0.462991 0.420877 -0.175392 0.659082 0.731333 -0.335413 0.777647 0.531754 -0.953615 0.177055 -0.243453 0.486604 0.721717 -0.492281 -0.099514 0.765277 0.635962 -0.808594 0.573712 0.130498 0.017875 0.721780 0.691892 -0.916015 0.381505 0.123980 0.823072 0.478169 -0.306441 0.167141 0.975881 0.140430 -0.099514 0.765277 0.635962 -0.335413 0.777647 0.531754 0.543567 0.838592 0.036024 -0.687610 0.725081 -0.038076 -0.660796 0.706421 -0.253610 -0.897734 0.432818 0.082108 -0.289746 0.516051 0.806063 -0.906147 0.306236 0.291749 -0.878183 0.461228 0.126737 0.650012 -0.508183 -0.565008 -0.679708 -0.706300 -0.197832 -0.763103 0.449135 0.464706 -0.715310 0.484236 0.503832 -0.916015 0.381505 0.123980 -0.763103 0.449135 0.464706 0.017875 0.721780 0.691892 0.578480 0.548498 0.603748 -0.416961 0.812271 
-0.407871 -0.687610 0.725081 -0.038076 -0.819289 0.538680 -0.196444 -0.808594 0.573712 0.130498 -0.018827 0.362985 0.931605 -0.438448 0.277029 0.854996 -0.906147 0.306236 0.291749 -0.715310 0.484236 0.503832 -0.689282 0.467377 0.553578 0.662014 0.742784 0.100049 -0.479281 0.764349 -0.431348 -0.519149 0.075361 0.851355 -0.018827 0.362985 0.931605 -0.438448 0.277029 0.854996 -0.519149 0.075361 0.851355 -0.906147 0.306236 0.291749 -0.040342 0.706525 -0.706537 0.101331 0.262707 0.959540 -0.519149 0.075361 0.851355 0.080905 0.416073 0.905725 0.088940 -0.643972 0.759862 0.470313 0.480706 -0.740086 -0.652315 0.312080 0.690718 -0.683455 -0.346286 0.642631 -0.519149 0.075361 0.851355 -0.877114 0.361943 0.315703 + + + + + + + + + + 1.527478 1.303994 0.830254 0.895200 1.452184 0.741087 0.482068 2.868077 0.225741 1.445017 0.779754 2.052803 0.683353 0.150491 0.931392 -0.086609 1.296882 -0.037840 1.972089 2.026078 1.331730 1.604843 1.022334 0.986386 2.075662 1.316896 1.495518 0.696606 2.224683 0.657931 0.805779 -0.222096 1.227199 -0.094111 1.649408 1.824429 1.203025 1.666812 2.230409 0.832593 1.501146 0.869113 1.948762 0.271703 2.841004 2.507566 2.020977 1.955881 2.311093 1.352319 0.764853 -0.467467 1.198517 -0.202469 1.777292 2.040753 1.453463 1.844726 1.150220 1.831923 2.166043 0.418858 2.948616 0.987779 2.380068 1.012521 2.599765 2.799368 1.895441 2.729604 1.825680 2.177248 2.584543 2.081921 2.397561 2.717347 1.849592 1.571650 0.637837 -0.570036 1.072781 2.260137 2.840543 0.389505 1.643832 2.732802 1.880632 2.125759 1.963863 2.676005 2.676972 2.062649 2.497506 2.700286 2.675841 2.062060 -0.626512 0.602840 -0.719519 -0.150619 -0.368935 0.468044 1.824572 2.578731 1.121743 2.211863 2.015162 1.953594 1.491845 2.109824 1.261300 2.058118 0.463755 2.413955 0.253642 1.828477 0.652954 1.155026 2.175341 0.751151 2.704798 0.223570 2.849957 0.724978 3.338023 -0.515361 2.565150 0.582365 2.350444 0.017497 2.095418 2.287157 2.043235 2.700653 1.780300 2.369309 3.553941 2.518572 2.497506 2.700286 2.676972 2.062649 -3.046598 1.333943 -2.912767 0.705612 -3.045519 1.333253 3.485296 1.338239 3.028967 1.333346 3.027865 1.332698 -0.655778 0.513913 -0.647216 0.690811 -0.974084 2.729088 -1.307625 2.601667 -0.823709 1.912565 -0.790193 1.126228 2.358757 0.114221 2.671236 -0.055994 2.184870 0.516508 2.578641 0.360650 2.804700 0.487828 2.706640 0.867308 3.270732 0.468437 3.705535 0.358818 2.326106 2.615286 2.055010 2.685089 2.104545 2.271226 3.273563 0.472139 2.348281 0.995722 2.222878 0.688233 3.883130 1.835775 3.028967 1.333346 3.485296 1.338239 3.482281 0.612497 3.028265 1.227443 2.873240 0.605593 3.485693 1.233263 -0.922499 0.328099 -0.706832 2.495751 -0.355271 2.543699 -0.533136 2.891772 -0.772270 0.994699 2.392996 -0.023485 2.059413 0.137778 2.673833 0.360495 2.938543 0.538854 3.400060 0.359315 3.995537 2.752753 2.253144 1.965046 2.512010 1.854639 1.028766 3.845747 0.646357 3.630085 0.681485 3.214872 4.169970 2.464622 2.668494 2.228135 3.632814 1.791255 -0.924994 0.079668 -0.780282 0.849983 1.635155 0.667211 2.104625 0.283627 2.203075 0.671265 2.225593 0.055372 2.994214 0.437079 3.019191 3.130669 2.471447 2.532378 4.344096 2.830541 -2.561968 4.306729 -1.469179 2.912799 -1.338978 3.351502 -0.935688 0.340549 -0.936411 -0.063843 -0.802409 0.782746 -0.869000 0.971753 2.074021 0.163126 3.190827 0.436544 -2.164553 3.597142 -1.590286 3.232698 -1.973657 3.961211 -3.791839 -0.746237 -3.111871 -1.591858 -2.587213 -0.087793 -1.091628 0.268290 -0.887869 0.693069 0.325550 -0.789460 -0.012768 -1.239155 0.178398 -1.458106 3.015783 
2.082966 2.187675 1.880264 2.469261 1.673181 -2.075268 0.098948 -2.415094 -0.645557 -1.121580 -0.359600 -1.111437 0.117734 -0.887062 0.774761 -0.109342 -1.266953 -0.006484 -1.391611 4.769830 1.789229 4.291533 2.442222 4.079894 2.158118 -0.094008 -1.114202 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 1 4 0 0 5 1 4 6 4 5 7 2 2 8 3 6 9 5 7 10 1 8 11 3 9 12 0 0 13 6 10 14 1 4 6 7 5 15 4 5 7 4 5 7 8 5 16 2 2 8 5 7 10 3 6 9 9 7 17 10 7 18 1 8 11 5 7 10 6 11 19 0 12 20 11 13 21 12 14 22 3 9 23 6 11 24 1 4 6 13 5 25 7 5 15 8 5 16 4 5 7 14 5 26 9 7 17 3 6 9 15 7 27 16 7 28 5 7 10 9 7 17 17 7 29 1 8 11 10 7 18 11 15 30 18 16 31 6 11 32 12 14 33 19 17 34 3 18 35 20 19 36 12 14 37 6 11 38 13 5 25 1 4 6 21 20 39 15 7 27 3 6 9 22 21 40 1 8 11 17 7 29 22 21 40 18 16 31 11 15 30 23 22 41 24 23 42 3 18 43 19 17 44 25 24 45 12 14 46 20 19 47 26 25 48 21 20 49 1 26 50 24 23 51 22 27 52 3 28 53 15 7 27 22 21 40 27 7 54 22 21 40 17 7 29 28 7 55 22 21 56 29 29 57 1 4 58 11 30 59 30 31 60 23 22 61 31 32 62 18 16 63 23 22 64 19 17 65 32 33 66 24 23 67 33 34 68 12 35 69 25 24 70 25 36 71 18 37 72 20 38 73 34 39 74 25 40 75 20 41 76 21 20 49 26 25 48 35 25 77 1 26 50 36 25 78 26 25 48 24 42 79 37 43 80 22 27 81 22 21 40 28 7 55 27 7 54 36 25 78 1 26 50 29 29 82 38 44 83 30 31 84 11 45 85 30 31 86 39 46 87 23 47 88 40 46 89 31 32 90 23 47 88 41 48 91 32 33 92 19 17 93 33 34 94 42 49 95 12 35 96 33 50 97 25 40 98 34 51 99 43 52 100 20 41 101 18 37 102 20 41 101 43 52 100 34 39 103 21 20 49 35 25 77 44 25 104 37 43 105 24 42 106 45 53 107 36 25 78 29 29 82 46 25 108 30 31 84 38 44 83 47 44 109 38 44 83 11 45 85 48 54 110 39 46 87 30 31 86 49 46 111 23 47 88 39 46 87 50 46 112 31 32 90 40 46 89 51 46 113 40 46 89 23 47 88 50 46 112 52 55 114 32 33 115 41 56 116 42 57 117 41 56 118 19 17 119 52 58 120 42 57 121 33 59 122 21 20 49 44 25 104 53 25 123 54 25 124 36 25 78 46 25 108 2 60 125 48 54 126 11 45 127 48 54 110 55 44 128 38 44 83 56 46 129 40 46 89 50 46 112 57 61 130 32 33 131 52 62 132 52 55 133 41 56 134 42 57 135 53 25 123 44 25 104 58 25 136 21 20 49 53 25 123 59 25 137 60 25 138 36 25 78 54 25 124 61 25 139 54 25 124 46 25 108 48 54 126 2 60 125 62 60 140 40 46 89 56 46 129 63 46 141 64 63 142 32 33 143 57 61 144 52 62 145 65 64 146 57 61 147 66 25 148 53 25 123 58 25 136 36 25 78 60 25 138 67 25 149 2 60 150 68 65 151 62 60 152 64 63 153 45 66 154 32 33 155 52 55 156 69 67 157 65 64 158 53 25 123 66 25 148 70 25 159 67 25 149 60 25 138 71 25 160 2 60 150 72 65 161 68 65 151 62 60 152 68 65 151 73 65 162 69 67 163 52 68 164 74 69 165 72 65 161 2 60 150 75 65 166

+
+
+
+ + + + -1.942813 58.152271 74.325898 3.979280 54.659519 80.320370 3.951871 67.773108 72.573539 -2.362486 45.198312 75.733916 8.994844 66.338534 78.240529 1.159254 67.329551 64.017267 -4.200631 43.945750 66.874169 9.608259 73.804806 71.492750 -0.812009 60.689287 68.961903 -1.543450 57.498012 69.351399 -0.402078 60.517862 64.849481 -2.618657 51.496421 67.320268 -0.095333 66.480444 54.169967 -1.402171 57.294865 67.647851 -2.910673 49.573507 66.186601 0.607089 70.099064 52.535430 -0.466458 61.207664 62.676420 -4.050904 46.221778 63.103998 0.754464 72.855041 47.651742 -0.946609 63.575793 52.783433 -4.871226 48.973679 49.144719 1.236423 74.886076 47.557733 0.257494 69.854236 49.810137 -1.281084 60.725458 56.125166 -2.988876 49.984539 64.517710 -4.629358 47.404981 54.982721 5.247517 76.829315 63.508599 -1.257649 58.431247 61.562358 -2.085416 53.544659 64.903579 -5.450261 45.941869 50.603192 -4.455533 46.638494 58.357653 -2.065811 58.047881 54.847025 -1.978894 54.954776 62.696921 -5.152079 46.150690 52.927683 -5.520641 42.354169 58.101106 -3.469965 55.052756 48.475749 -5.548680 43.870783 54.388993 -4.615327 44.524427 61.665007 -2.591538 57.738267 50.615521 -5.366409 41.973799 60.414032 -1.931594 59.191657 53.506063 + + + + + + + + + + -0.868312 0.296424 0.397702 -0.612653 0.315815 0.724512 -0.690865 0.539100 0.481743 -0.870489 0.080970 0.485481 -0.612653 0.315815 0.724512 -0.635945 0.391539 0.665035 -0.895896 0.426483 0.124430 -0.983711 0.167567 0.065067 -0.698415 0.689399 0.192212 -0.895896 0.426483 0.124430 -0.972001 0.209889 -0.105640 -0.868312 0.296424 0.397702 -0.983711 0.167567 0.065067 -0.974757 0.223264 -0.001340 -0.966727 0.234168 0.102973 -0.937848 0.317043 0.141157 -0.900377 0.397015 0.178047 + + + + + + + + + + 4.683062 1.434497 4.602871 2.018963 4.006029 1.263641 4.820583 -0.400103 4.193260 0.033347 4.036001 -0.533170 4.640144 1.587065 3.873799 1.406734 3.960480 0.915381 4.666745 1.749991 3.979611 1.633222 4.096436 1.063072 4.666168 0.813439 4.583891 1.398433 3.794209 1.305464 3.995775 1.828709 3.492004 1.757397 4.134572 1.264147 4.015669 1.792582 4.181345 2.141865 3.594752 1.470605 4.215182 1.817944 5.056671 1.656636 4.020608 1.524796 4.586528 1.685684 3.644886 1.025998 3.713179 0.384959 4.070744 1.080173 4.225466 1.707015 4.704832 1.611864 3.488770 0.278554 4.030811 0.938712 4.754971 1.167217 4.969853 0.966546 5.106811 1.211977 3.323418 -0.039365 3.897435 0.294698 4.818627 0.057825 3.196205 -0.045485 3.508286 0.101142 4.071059 0.512238 4.731747 1.058576 4.908071 0.437867 3.934177 0.983914 3.303923 0.950442 3.510908 -0.099179 4.206610 0.866188 4.507919 1.083695 5.006509 0.152769 4.950976 0.657568 4.240911 0.429034 4.422869 0.940046 4.989862 0.304088 5.220019 0.640868 4.438458 0.014276 5.130592 0.399217 5.078485 0.872871 4.266792 0.153571 5.240335 0.791435 4.171240 0.341740 + + + + + + + + + + + + + + +

[COLLADA geometry data omitted: the remainder of the preceding tree-model .dae addition — repeated float_array sources (vertex positions, normals, UV coordinates), triangle index lists, the perspective camera parameters (clip planes 1.000000–1000.000000), material bindings, and the visual-scene node transform matrix. The XML markup was stripped during extraction; only raw numeric arrays remained, so the data is summarized here rather than reproduced.]
diff --git a/wild_visual_navigation_sim/Media/models/OliveTree2.dae b/wild_visual_navigation_sim/Media/models/OliveTree2.dae
new file mode 100644
index 00000000..fd294c0d
--- /dev/null
+++ b/wild_visual_navigation_sim/Media/models/OliveTree2.dae
@@ -0,0 +1,4263 @@
[COLLADA model data omitted: asset metadata (authoring tool Google SketchUp 7.0.8657, created 2010-03-16, Z_UP axis), image references textures/texture0.png and textures/texture0.jpg, effect/material definitions material_0_1_0-image and material_1_2_0-image (black emission/ambient, textured diffuse, transparency 0.010000 and 0.050000), and the mesh geometry sources (vertex positions, normals, UV coordinates) with triangle index lists for the OliveTree2 model. As above, the XML markup was lost in extraction, so the 4263 added lines are summarized rather than reproduced.]
0.791292 -0.556362 -0.253610 0.821355 -0.569145 -0.038076 0.912133 -0.359755 -0.196444 0.092658 -0.351464 0.931605 0.485833 -0.181514 0.854996 0.799218 -0.327725 0.503832 0.770293 -0.316544 0.553578 -0.496127 -0.862466 0.100049 0.625458 -0.650185 -0.431348 0.523589 0.032396 0.851355 0.306465 -0.609287 0.731333 0.523589 0.032396 0.851355 0.183971 -0.683345 -0.706537 -0.045467 -0.277877 0.959540 0.523589 0.032396 0.851355 0.092658 -0.351464 0.931605 0.005890 -0.423825 0.905725 -0.218750 0.612175 0.759862 -0.045467 -0.277877 0.959540 -0.362072 -0.566724 -0.740086 0.702349 -0.172089 0.690718 0.598198 0.478732 0.642631 0.523589 0.032396 0.851355 0.932594 -0.174928 0.315703 + + + + + + + + + + 1.527478 1.303994 0.830254 0.895200 1.452184 0.741087 0.482068 2.868077 0.225741 1.445017 0.779754 2.052803 0.683353 0.150491 0.931392 -0.086609 1.296882 -0.037840 1.972089 2.026078 1.331730 1.604843 1.022334 0.986386 2.075662 1.316896 1.495518 0.696606 2.224683 0.657931 0.805779 -0.222096 1.227199 -0.094111 1.649408 1.824429 1.203025 1.666812 2.230409 0.832593 1.501146 0.869113 1.948762 0.271703 2.841004 2.507566 2.020977 1.955881 2.311093 1.352319 0.764853 -0.467467 1.198517 -0.202469 1.777292 2.040753 1.453463 1.844726 1.150220 1.831923 2.166043 0.418858 2.948616 0.987779 2.380068 1.012521 2.599765 2.799368 1.895441 2.729604 1.825680 2.177248 2.584543 2.081921 2.397561 2.717347 1.849592 1.571650 0.637837 -0.570036 1.072781 2.260137 2.840543 0.389505 1.643832 2.732802 1.880632 2.125759 1.963863 2.676005 2.676972 2.062649 2.497506 2.700286 2.675841 2.062060 -0.626512 0.602840 -0.719519 -0.150619 -0.368935 0.468044 1.824572 2.578731 1.121743 2.211863 2.015162 1.953594 1.491845 2.109824 1.261300 2.058118 0.463755 2.413955 0.253642 1.828477 0.652954 1.155026 2.175341 0.751151 2.704798 0.223570 2.849957 0.724978 3.338023 -0.515361 2.565150 0.582365 2.350444 0.017497 2.095418 2.287157 2.043235 2.700653 1.780300 2.369309 3.553941 2.518572 2.497506 2.700286 2.676972 2.062649 -3.046598 1.333943 -2.912767 0.705612 -3.045519 1.333253 3.485296 1.338239 3.028967 1.333346 3.027865 1.332698 -0.655778 0.513913 -0.647216 0.690811 -0.974084 2.729088 -1.307625 2.601667 -0.823709 1.912565 -0.790193 1.126228 2.358757 0.114221 2.671236 -0.055994 2.184870 0.516508 2.578641 0.360650 2.804700 0.487828 2.706640 0.867308 3.270732 0.468437 3.705535 0.358818 2.326106 2.615286 2.055010 2.685089 2.104545 2.271226 3.273563 0.472139 2.348281 0.995722 2.222878 0.688233 3.883130 1.835775 3.028967 1.333346 3.485296 1.338239 3.482281 0.612497 3.028265 1.227443 2.873240 0.605593 3.485693 1.233263 -0.922499 0.328099 -0.706832 2.495751 -0.355271 2.543699 -0.533136 2.891772 -0.772270 0.994699 2.392996 -0.023485 2.059413 0.137778 2.673833 0.360495 2.938543 0.538854 3.400060 0.359315 3.995537 2.752753 2.253144 1.965046 2.512010 1.854639 1.028766 3.845747 0.646357 3.630085 0.681485 3.214872 4.169970 2.464622 2.668494 2.228135 3.632814 1.791255 -0.924994 0.079668 -0.780282 0.849983 1.635155 0.667211 2.104625 0.283627 2.203075 0.671265 2.225593 0.055372 2.994214 0.437079 3.019191 3.130669 2.471447 2.532378 4.344096 2.830541 -2.561968 4.306729 -1.469179 2.912799 -1.338978 3.351502 -0.935688 0.340549 -0.936411 -0.063843 -0.802409 0.782746 -0.869000 0.971753 2.074021 0.163126 3.190827 0.436544 -2.164553 3.597142 -1.590286 3.232698 -1.973657 3.961211 -3.791839 -0.746237 -3.111871 -1.591858 -2.587213 -0.087793 -1.091628 0.268290 -0.887869 0.693069 0.325550 -0.789460 -0.012768 -1.239155 0.178398 -1.458106 3.015783 2.082966 2.187675 1.880264 2.469261 
1.673181 -2.075268 0.098948 -2.415094 -0.645557 -1.121580 -0.359600 -1.111437 0.117734 -0.887062 0.774761 -0.109342 -1.266953 -0.006484 -1.391611 4.769830 1.789229 4.291533 2.442222 4.079894 2.158118 -0.094008 -1.114202 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 0 5 1 5 6 4 6 7 2 2 8 3 7 9 5 8 10 1 4 11 3 7 12 0 0 13 6 9 14 1 5 6 7 6 15 4 6 7 4 6 7 8 6 16 2 2 8 5 8 10 3 7 9 9 8 17 10 8 18 1 4 11 5 8 10 6 10 19 0 11 20 11 12 21 12 13 22 3 14 23 6 10 24 1 5 6 13 6 25 7 6 15 8 6 16 4 6 7 14 6 26 9 8 17 3 7 9 15 8 27 16 8 28 5 8 10 9 8 17 17 8 29 1 4 11 10 8 18 11 15 30 18 16 31 6 10 32 12 13 33 19 17 34 3 3 35 20 18 36 12 13 37 6 10 38 13 6 25 1 5 6 21 19 39 15 8 27 3 7 9 22 20 40 1 4 11 17 8 29 22 20 40 18 16 31 11 15 30 23 21 41 24 22 42 3 3 43 19 17 44 25 23 45 12 13 46 20 18 47 26 24 48 21 19 49 1 5 50 24 22 51 22 25 52 3 26 53 15 8 27 22 20 40 27 8 54 22 20 40 17 8 29 28 8 55 22 20 56 29 27 57 1 5 58 11 28 59 30 29 60 23 21 61 31 30 62 18 16 63 23 21 64 19 31 65 32 32 66 24 33 67 33 34 68 12 13 69 25 23 70 25 35 71 18 36 72 20 37 73 34 38 74 25 39 75 20 40 76 21 19 49 26 24 48 35 24 77 1 5 50 36 24 78 26 24 48 24 33 79 37 41 80 22 25 81 22 20 40 28 8 55 27 8 54 36 24 78 1 5 50 29 27 82 38 42 83 30 43 84 11 44 85 30 29 86 39 45 87 23 21 88 40 45 89 31 30 90 23 21 88 41 46 91 32 32 92 19 31 93 33 34 94 42 47 95 12 13 96 33 34 97 25 39 98 34 48 99 43 49 100 20 40 101 18 36 102 20 40 101 43 49 100 34 38 103 21 19 49 35 24 77 44 24 104 37 41 105 24 33 106 45 50 107 36 24 78 29 27 82 46 24 108 30 43 84 38 42 83 47 42 109 38 42 83 11 44 85 48 51 110 39 45 87 30 29 86 49 45 111 23 21 88 39 45 87 50 45 112 31 30 90 40 45 89 51 45 113 40 45 89 23 21 88 50 45 112 52 52 114 32 32 115 41 46 116 42 47 117 41 46 118 19 53 119 52 54 120 42 47 121 33 34 122 21 19 49 44 24 104 53 24 123 54 24 124 36 24 78 46 24 108 2 55 125 48 51 126 11 44 127 48 51 110 55 42 128 38 42 83 56 45 129 40 45 89 50 45 112 57 56 130 32 32 131 52 57 132 52 52 133 41 58 134 42 47 135 53 24 123 44 24 104 58 24 136 21 19 49 53 24 123 59 24 137 60 24 138 36 24 78 54 24 124 61 24 139 54 24 124 46 24 108 48 51 126 2 55 125 62 55 140 40 45 89 56 45 129 63 45 141 64 59 142 32 32 143 57 56 144 52 57 145 65 60 146 57 61 147 66 24 148 53 24 123 58 24 136 36 24 78 60 24 138 67 24 149 2 55 150 68 62 151 62 55 152 64 59 153 45 63 154 32 32 155 52 52 156 69 64 157 65 60 158 53 24 123 66 24 148 70 24 159 67 24 149 60 24 138 71 24 160 2 55 150 72 62 161 68 62 151 62 55 152 68 62 151 73 62 162 69 64 163 52 65 164 74 66 165 72 62 161 2 55 150 75 62 166

+
+
+
+ + + + 72.063420 76.002159 112.823755 63.208191 79.004898 120.976237 66.891761 61.554929 110.440546 69.019428 93.363959 114.738660 59.779278 62.062196 118.147653 70.486099 62.922087 98.804017 71.118121 95.542660 102.689404 61.039143 51.952049 108.970674 71.263608 72.310239 105.528722 71.349805 76.762080 106.058436 70.670207 72.424443 99.935829 71.112055 85.050787 103.296098 71.920131 64.401388 85.411689 71.105228 76.993229 103.741611 70.966013 87.691898 101.754312 71.991416 59.388711 83.188719 70.947758 71.524041 96.980465 71.551792 92.471034 97.561971 72.561700 55.678802 76.546903 72.245574 68.504985 83.526003 73.409198 89.035683 78.577351 72.484948 52.840927 76.419050 72.388728 59.811868 79.482320 71.898129 72.392543 88.070760 71.184436 87.166456 99.484619 72.650931 91.056760 86.517035 67.685584 49.138428 98.112229 71.228876 75.440216 95.465340 70.971817 82.175748 100.009402 73.336852 93.232847 80.560875 72.206353 92.028810 91.106942 72.198130 76.175337 86.332488 71.222184 80.268890 97.008347 72.997972 92.871923 83.722182 72.432754 98.028581 90.758037 73.234435 80.553142 77.667552 72.891875 96.017372 85.709565 71.831127 94.887623 95.604943 72.811900 76.733726 80.577642 72.121646 98.492059 93.903618 72.337552 74.615347 84.508779 + + + + + + + + + + 0.910580 -0.112594 0.397702 0.664289 -0.183856 0.724512 0.786509 -0.386428 0.481743 0.868652 0.098752 0.485481 0.664289 -0.183856 0.724512 0.702574 -0.253216 0.665035 0.964177 -0.234263 0.124430 0.997190 0.037139 0.065067 0.786509 -0.386428 0.481743 0.824635 -0.532007 0.192212 0.994382 -0.006684 -0.105640 0.910580 -0.112594 0.397702 0.997190 0.037139 0.065067 0.999815 -0.019212 -0.001340 0.994184 -0.031527 0.102973 0.982863 -0.118557 0.141157 0.962538 -0.204501 0.178047 + + + + + + + + + + 4.683062 1.434497 4.602871 2.018963 4.006029 1.263641 4.820583 -0.400103 4.193260 0.033347 4.036001 -0.533170 4.640144 1.587065 3.873799 1.406734 3.960480 0.915381 4.666745 1.749991 3.979611 1.633222 4.096436 1.063072 4.666168 0.813439 4.583891 1.398433 3.794209 1.305464 3.995775 1.828709 3.492004 1.757397 4.134572 1.264147 4.015669 1.792582 4.181345 2.141865 3.594752 1.470605 4.215182 1.817944 5.056671 1.656636 4.020608 1.524796 4.586528 1.685684 3.644886 1.025998 3.713179 0.384959 4.070744 1.080173 4.225466 1.707015 4.704832 1.611864 3.488770 0.278554 4.030811 0.938712 4.754971 1.167217 4.969853 0.966546 5.106811 1.211977 3.323418 -0.039365 3.897435 0.294698 4.818627 0.057825 3.196205 -0.045485 3.508286 0.101142 4.071059 0.512238 4.731747 1.058576 4.908071 0.437867 3.934177 0.983914 3.303923 0.950442 3.510908 -0.099179 4.206610 0.866188 4.507919 1.083695 5.006509 0.152769 4.950976 0.657568 4.240911 0.429034 4.422869 0.940046 4.989862 0.304088 5.220019 0.640868 4.438458 0.014276 5.130592 0.399217 5.078485 0.872871 4.266792 0.153571 5.240335 0.791435 4.171240 0.341740 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 0 5 1 1 6 4 5 7 2 2 8 0 0 9 2 2 10 5 6 11 6 7 12 3 3 13 0 0 14 2 8 15 7 9 16 5 6 17 8 10 18 0 11 19 5 6 20 0 11 19 9 10 21 6 12 22 0 11 19 8 10 18 9 10 21 10 13 23 8 10 18 5 6 20 6 12 22 9 10 21 11 10 24 5 6 25 12 14 26 10 13 27 11 10 24 9 10 21 13 10 28 6 12 22 11 10 24 14 13 29 5 6 25 15 14 30 12 14 26 10 13 27 12 14 26 16 14 31 14 13 32 17 14 33 6 12 34 5 6 25 18 14 35 15 14 30 16 14 31 12 14 26 19 14 36 14 13 32 20 14 37 17 14 33 18 14 35 5 6 25 21 15 38 22 14 39 15 14 30 18 14 35 23 14 40 16 14 31 19 14 36 24 14 41 20 14 37 14 13 32 17 14 33 20 14 37 25 14 42 5 6 43 26 16 44 21 15 45 16 14 31 23 14 40 27 14 46 28 14 47 20 14 37 24 14 41 29 14 48 25 14 42 20 14 37 17 14 33 25 14 42 30 14 49 27 14 46 23 14 40 31 14 50 32 14 51 20 14 37 28 14 47 25 14 42 29 14 48 33 14 52 34 14 53 17 14 33 30 14 49 27 14 46 31 14 50 32 14 51 32 14 51 35 14 54 20 14 37 36 14 55 25 14 42 33 14 52 17 14 33 34 14 53 37 14 56 32 14 51 31 14 50 38 14 57 32 14 51 38 14 57 35 14 54 37 14 56 34 14 53 39 14 58 38 14 57 31 14 50 40 14 59

+
+
+
+ + + + 67.020414 66.068205 65.928604 67.835167 64.518541 69.870490 67.074871 61.638001 65.879363 68.595068 67.291805 73.851904 68.823659 60.204115 74.477006 68.100792 67.514852 71.408699 67.901383 59.438304 69.832772 67.979024 57.469088 70.076757 68.635675 55.238588 73.182782 68.294711 50.986822 71.178978 + + + + + + + + + + 0.980296 0.014240 -0.197020 + + + + + + + + + + 3.693566 0.101617 3.623601 0.293049 3.495029 0.099226 3.747366 0.486401 3.429644 0.516758 3.757682 0.367750 3.395931 0.291217 3.307647 0.303066 3.207280 0.453906 3.016996 0.356594 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 3 0 3 2 0 2 1 0 1 4 0 4 2 0 2 3 0 3 3 0 3 1 0 1 5 0 5 4 0 4 6 0 6 2 0 2 4 0 4 7 0 7 6 0 6 8 0 8 7 0 7 4 0 4 7 0 7 8 0 8 9 0 9

+
+
+
+ + + + 67.680479 96.549454 72.639919 71.500897 99.435041 70.501984 68.042929 97.505366 69.326112 72.414983 99.417657 73.218401 65.258901 94.942314 72.983099 67.422782 95.480419 76.771844 70.934234 98.176173 74.608281 + + + + + + + + + + -0.525661 0.830953 0.182205 + + + + + + + + + + -4.368319 -0.629004 -4.582125 -0.732527 -4.404945 -0.789466 -4.616325 -0.600992 -4.238113 -0.612386 -4.332949 -0.428926 -4.530508 -0.533691 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 3 0 3 0 0 0 4 0 4 3 0 3 4 0 4 5 0 5 3 0 3 5 0 5 6 0 6

+
+
+
+ + + + 34.927379 17.088715 67.938904 38.168351 18.364762 67.399728 37.131041 18.004081 67.283676 39.890177 18.668784 69.374146 34.020210 16.453104 69.773445 41.968113 19.208480 70.712078 35.131171 16.564346 71.560867 49.403732 21.198176 75.146153 44.509180 20.269724 69.921904 37.929632 17.427505 72.538413 50.041742 22.339526 69.657550 38.419201 17.206582 74.958350 48.831648 22.236985 67.598000 35.730384 15.861539 77.137394 44.267606 20.909379 65.519178 37.135441 16.740912 74.931402 46.517684 19.816898 77.107554 47.494733 22.227708 64.693684 40.011983 17.316600 77.820098 46.111839 21.770739 64.394607 36.003413 15.738331 78.486974 44.736077 18.571981 80.690053 39.844270 17.236279 77.934397 42.245542 17.822104 79.709383 37.100529 15.891855 79.988078 43.612368 18.034086 81.454238 + + + + + + + + + + -0.339804 0.927893 0.153455 + + + + + + + + + + -0.978458 -0.426380 -1.134495 -0.452358 -1.085289 -0.457950 -1.211631 -0.357226 -0.930491 -0.337987 -1.307384 -0.292761 -0.978953 -0.251865 -1.650921 -0.079117 -1.430660 -0.330834 -1.110008 -0.204764 -1.695355 -0.343571 -1.127204 -0.088166 -1.642857 -0.442805 -0.993337 0.016826 -1.430352 -0.542968 -1.066010 -0.089464 -1.508197 0.015388 -1.586459 -0.582742 -1.195920 0.049720 -1.521227 -0.597152 -1.002927 0.081852 -1.414047 0.188002 -1.187626 0.055227 -1.297694 0.140750 -1.051457 0.154179 -1.358474 0.224822 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 3 0 3 0 0 0 4 0 4 3 0 3 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6 7 0 7 5 0 5 6 0 6 7 0 7 8 0 8 5 0 5 7 0 7 6 0 6 9 0 9 10 0 10 8 0 8 7 0 7 7 0 7 9 0 9 11 0 11 12 0 12 8 0 8 10 0 10 11 0 11 13 0 13 7 0 7 12 0 12 14 0 14 8 0 8 13 0 13 11 0 11 15 0 15 7 0 7 13 0 13 16 0 16 17 0 17 14 0 14 12 0 12 16 0 16 13 0 13 18 0 18 14 0 14 17 0 17 19 0 19 20 0 20 18 0 18 13 0 13 16 0 16 18 0 18 21 0 21 18 0 18 20 0 20 22 0 22 21 0 21 18 0 18 23 0 23 22 0 22 20 0 20 24 0 24 21 0 21 23 0 23 25 0 25

+
+
+
+ + + + 31.135687 32.168675 127.619391 28.170878 28.311555 123.436570 33.690952 31.225196 120.610292 + + + + + + + + + + 0.562721 -0.766969 0.308388 + + + + + + + + + + 1.555328 3.012838 1.345965 2.803482 1.622639 2.662024 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2

+
+
+
+ + + + 65.141683 82.031398 78.163442 63.840007 83.206775 72.772170 63.490048 82.237731 69.043439 64.121629 84.794641 77.206007 65.237869 79.979050 75.075661 62.915319 85.632993 71.764674 62.968082 82.718266 66.897907 62.993968 87.067459 74.760676 63.872330 79.174306 65.805475 61.396282 88.621408 68.340765 64.791649 77.289495 67.742420 61.971011 85.226146 65.619531 65.403610 77.013341 70.767341 61.275928 86.666777 64.182628 66.600506 76.179238 76.162136 60.171537 90.889038 65.328644 67.106396 72.766042 73.013723 60.557543 90.714848 67.236671 + + + + + + + + + + -0.942521 -0.291072 0.164106 + + + + + + + + + + -3.697656 0.795219 -3.765192 0.535004 -3.728329 0.355033 -3.829454 0.749008 -3.608511 0.646184 -3.881299 0.486376 -3.755805 0.251476 -3.941677 0.630981 -3.592111 0.198749 -4.029336 0.321117 -3.499255 0.292238 -3.876365 0.189774 -3.479340 0.438239 -3.947238 0.120420 -3.427801 0.698624 -4.142620 0.175734 -3.274973 0.546663 -4.130058 0.267827 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 2 0 2 4 0 4 0 0 0 1 0 1 3 0 3 5 0 5 6 0 6 4 0 4 2 0 2 5 0 5 3 0 3 7 0 7 8 0 8 4 0 4 6 0 6 6 0 6 2 0 2 9 0 9 10 0 10 4 0 4 8 0 8 6 0 6 9 0 9 11 0 11 12 0 12 4 0 4 10 0 10 11 0 11 9 0 9 13 0 13 12 0 12 14 0 14 4 0 4 13 0 13 9 0 9 15 0 15 14 0 14 12 0 12 16 0 16 15 0 15 9 0 9 17 0 17

+
+
+
+ + + + 65.294238 36.973416 71.231270 65.448918 40.489124 66.930938 64.231099 35.897194 68.174163 66.212248 38.588688 72.902055 66.978889 41.828241 71.626644 66.300997 41.322334 69.422601 67.571343 44.740492 70.063612 + + + + + + + + + + -0.927842 0.304416 0.215500 + + + + + + + + + + -2.872304 0.164487 -3.024156 -0.045185 -2.811629 0.015431 -2.953903 0.245950 -3.102546 0.183765 -3.071535 0.076302 -3.234820 0.107556 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 1 0 1 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6

+
+
+
+ + + + 60.655686 78.816359 125.905907 56.884153 65.064990 128.363745 59.779278 62.062196 118.147653 + + + + + + + + + + 0.926586 -0.196857 0.320446 0.929653 -0.185905 0.318096 0.949622 -0.114461 0.291749 + + + + + + + + + + 4.539313 2.162812 3.901431 2.286348 3.796771 1.772865 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2

+
+
+
+ + + + 79.914887 33.164519 30.505669 80.477735 29.636920 29.284737 80.257921 30.548531 28.941535 80.002343 34.291788 33.263827 80.923624 27.822338 30.041803 81.594113 23.766896 28.845648 81.838160 21.539960 27.089188 80.832801 32.764799 37.933572 82.989112 13.524882 23.182121 82.046767 22.526936 30.673701 80.831599 26.862320 27.537455 82.407155 18.596048 26.949708 81.970877 24.985869 34.327954 82.095169 17.185770 21.704619 83.271137 14.712322 27.769713 82.318311 20.570993 29.637630 82.341042 22.932930 33.994838 80.283840 28.362803 25.325317 82.817770 17.298089 28.303313 81.485252 29.877746 38.633420 81.762654 17.941893 20.089493 82.901849 17.414087 29.252209 82.821192 21.963963 36.543280 79.661019 31.564686 25.441892 83.425973 14.752388 29.211802 83.061276 25.795583 45.411801 79.378321 31.591191 22.984286 82.770965 19.331033 31.465683 83.220442 19.485036 35.718281 81.367255 17.812912 16.359961 84.117739 13.450582 33.049178 83.589018 18.129078 36.597437 80.193908 25.566004 19.607677 83.336783 17.781084 33.750748 84.103397 16.068809 37.528926 79.248729 30.579344 20.055963 80.906952 20.440664 16.905981 84.511045 15.416344 39.992002 79.631733 27.879657 18.698624 80.704702 20.994689 16.089191 83.569940 22.705897 44.481396 80.207577 24.161723 17.257914 84.526516 16.895538 42.731707 80.139378 22.852758 14.350641 84.989413 19.235233 50.948932 85.530202 16.833133 51.512921 + + + + + + + + + + 0.974417 -0.085360 -0.207907 + + + + + + + + + + 0.692621 0.705234 0.436466 0.726125 0.496647 0.680620 0.796101 0.842734 0.317351 0.821203 0.024762 0.857851 -0.145947 0.811025 0.732690 1.168272 -0.738302 0.789509 -0.044664 1.002624 0.227804 0.694641 -0.351686 0.882003 0.159489 1.158735 -0.497533 0.600726 -0.613936 1.036769 -0.189972 0.992344 0.013863 1.193872 0.311817 0.519450 -0.429472 0.999455 0.538565 1.288818 -0.459767 0.481978 -0.412751 1.054097 -0.030163 1.375191 0.535264 0.440114 -0.597984 1.123489 0.316944 1.811717 0.514662 0.289767 -0.259398 1.137117 -0.209868 1.391879 -0.502783 0.258387 -0.653356 1.392270 -0.296015 1.482010 0.065356 0.246831 -0.346181 1.318084 -0.430602 1.594341 0.417644 0.138791 -0.315290 0.220696 -0.453425 1.761918 0.217746 0.129027 -0.284270 0.156010 0.093858 1.838475 -0.053635 0.141674 -0.325671 1.888794 -0.171096 0.000000 -0.088131 2.325940 -0.249815 2.425123 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 5 0 5 4 0 4 3 0 3 6 0 6 4 0 4 5 0 5 5 0 5 3 0 3 7 0 7 8 0 8 4 0 4 6 0 6 5 0 5 7 0 7 9 0 9 8 0 8 10 0 10 4 0 4 11 0 11 8 0 8 6 0 6 9 0 9 7 0 7 12 0 12 13 0 13 10 0 10 8 0 8 8 0 8 11 0 11 14 0 14 15 0 15 11 0 11 6 0 6 12 0 12 7 0 7 16 0 16 13 0 13 17 0 17 10 0 10 14 0 14 11 0 11 18 0 18 16 0 16 7 0 7 19 0 19 20 0 20 17 0 17 13 0 13 14 0 14 18 0 18 21 0 21 16 0 16 19 0 19 22 0 22 20 0 20 23 0 23 17 0 17 14 0 14 21 0 21 24 0 24 22 0 22 19 0 19 25 0 25 20 0 20 26 0 26 23 0 23 24 0 24 21 0 21 27 0 27 22 0 22 25 0 25 28 0 28 29 0 29 26 0 26 20 0 20 24 0 24 27 0 27 30 0 30 28 0 28 25 0 25 31 0 31 32 0 32 26 0 26 29 0 29 30 0 30 27 0 27 33 0 33 31 0 31 25 0 25 34 0 34 32 0 32 35 0 35 26 0 26 36 0 36 32 0 32 29 0 29 30 0 30 33 0 33 31 0 31 34 0 34 25 0 25 37 0 37 30 0 30 31 0 31 34 0 34 35 0 35 32 0 32 38 0 38 39 0 39 32 0 32 36 0 36 37 0 37 25 0 25 40 0 40 39 0 39 41 0 41 32 0 32 37 0 37 40 0 40 42 0 42 41 0 41 39 0 39 43 0 43 42 0 42 40 0 40 44 0 44 42 0 42 44 0 44 45 0 45

+
+
+
+ + + + 62.413211 11.051422 29.091747 59.471245 12.239636 30.739269 60.195696 12.091880 28.261510 63.360543 10.567426 30.011621 63.706546 10.240142 32.500786 60.507831 11.581632 33.582822 62.280191 10.747245 34.286386 57.806499 12.667649 35.167254 63.451222 10.069569 36.559243 56.831827 12.962505 37.126492 66.972478 8.315489 39.335480 65.896721 9.010544 36.210134 65.484716 8.988176 39.141422 59.093306 11.862544 38.529327 62.220717 10.391417 39.753805 60.097250 11.235415 41.138052 64.236769 9.416105 40.928924 62.510616 10.009702 43.377251 64.693755 8.983843 44.216501 63.284333 9.582428 44.586028 + + + + + + + + + + -0.403568 -0.910929 -0.085680 + + + + + + + + + + -0.093458 -0.492105 -0.256792 -0.414166 -0.219594 -0.531382 -0.038749 -0.448589 -0.015626 -0.330834 -0.194249 -0.279645 -0.093376 -0.246362 -0.344119 -0.204691 -0.024091 -0.138839 -0.396170 -0.112005 0.178329 -0.007504 0.113168 -0.155355 0.094232 -0.016684 -0.266723 -0.045640 -0.088753 0.012286 -0.206361 0.077771 0.026535 0.067878 -0.067138 0.183701 0.057074 0.223404 -0.021787 0.240885 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 1 0 1 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6 5 0 5 6 0 6 7 0 7 7 0 7 6 0 6 8 0 8 7 0 7 8 0 8 9 0 9 8 0 8 10 0 10 9 0 9 10 0 10 8 0 8 11 0 11 9 0 9 10 0 10 12 0 12 9 0 9 12 0 12 13 0 13 14 0 14 13 0 13 12 0 12 13 0 13 14 0 14 15 0 15 16 0 16 14 0 14 12 0 12 14 0 14 16 0 16 17 0 17 17 0 17 16 0 16 18 0 18 17 0 17 18 0 18 19 0 19

+
+
+
+ + + + 84.208722 48.060467 87.979847 62.166894 3.477661 69.546159 66.384612 34.553806 86.130982 67.984184 12.991249 91.707145 79.155042 24.306272 96.092638 72.573549 9.416048 92.985689 + + + + + + + + + + -0.604890 0.796004 0.022026 -0.836686 0.296253 -0.460641 -0.669527 0.404682 0.622869 -0.567671 0.114566 0.815245 -0.292371 0.293232 0.910238 -0.059686 0.184123 0.981089 + + + + + + + + + + -3.499458 3.381980 -1.366679 1.641546 -2.356405 3.207417 -3.084065 2.549389 -1.484686 1.730840 -1.972995 2.824602 -3.984585 -0.149102 -2.843260 -0.335996 -2.936378 0.670986 -1.745831 -0.770064 -1.841858 -2.219690 -1.577752 -1.040449 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 2 2 3 1 1 4 3 3 5 0 0 6 2 2 7 4 4 8 5 5 9 2 2 10 3 3 11

+
+
+
+ + + + 78.069334 -10.585714 62.994074 64.529428 -10.794303 54.339606 76.601646 -11.075041 51.076916 79.993415 1.721886 74.603039 69.727084 -9.243045 50.232045 68.981613 -4.794479 66.393234 86.295466 0.940387 62.270260 67.598260 -7.385467 47.884833 75.383503 -10.239941 50.102075 74.422281 -1.488813 70.672938 66.461522 -5.096553 67.600999 86.186091 -5.623954 51.153575 93.976218 9.792579 85.875797 67.301070 -4.699017 43.633960 75.049913 -9.015044 48.224855 76.344772 0.721020 74.889065 70.740555 -2.457542 71.068525 65.135260 -4.260576 70.819002 95.088034 7.617171 61.806952 80.552836 7.102159 84.611676 92.366328 14.037827 72.783708 65.080660 -3.176812 41.857036 62.800927 -1.755909 79.164820 96.617200 2.297084 50.603926 74.357291 6.391482 85.644777 92.376227 14.057627 72.795776 61.893065 -6.478330 57.059342 70.898677 -0.418741 76.235237 66.724999 -2.077953 75.227503 57.958767 -4.686914 67.619595 93.618555 1.596114 40.073837 107.864396 18.201068 40.035761 77.736833 12.587704 89.811577 100.047939 29.401647 82.148266 98.202558 20.732014 72.886859 62.131486 -5.761430 55.265093 61.283082 -6.299038 58.834303 69.815777 11.487551 84.633561 88.343201 -1.657408 43.368029 96.823716 4.628916 42.717034 102.664394 11.539533 42.314023 83.057662 13.255825 88.934557 91.224229 12.795338 91.179859 102.864212 16.525217 61.928516 61.080790 -0.585130 51.515986 72.573549 9.416048 92.985689 58.772585 -4.721789 64.965777 88.619901 -0.466334 40.702982 82.936516 -3.796250 43.823930 94.821941 2.998782 40.070620 98.655276 6.481288 43.777526 104.002679 13.699850 40.046082 101.547384 40.073936 96.068764 62.344068 -0.000632 46.503463 59.459059 -4.263960 62.045878 85.762553 -2.122884 42.229115 99.082109 7.539371 41.662294 83.727055 21.507943 102.367215 60.917169 -0.376191 51.767178 62.998660 0.515506 43.607900 59.639328 -3.721914 60.689251 58.162193 -2.941120 64.502794 81.926359 -3.228094 41.545452 101.567040 10.436995 41.651172 79.155042 24.306272 96.092638 84.208722 48.060467 87.979847 60.115147 2.571089 50.309232 59.458221 -1.997883 58.879867 77.302244 -2.151100 44.666577 109.425028 46.267785 82.935498 60.742865 3.252703 47.271519 59.042715 -2.189850 60.528149 76.435004 -0.446775 44.270318 78.995722 -0.968822 42.493328 100.931898 35.693220 90.354736 74.998559 -1.960856 46.447760 + + + + + + + + + + 0.324383 -0.764031 0.557703 -0.289127 -0.943424 0.162347 -0.086231 -0.943453 -0.320096 0.222963 -0.776965 0.588739 -0.289127 -0.943424 0.162347 -0.289127 -0.943424 0.162347 -0.144832 -0.760444 -0.633047 0.222963 -0.776965 0.588739 0.260975 -0.857018 0.444310 -0.289127 -0.943424 0.162347 0.711725 -0.620492 0.329298 0.711725 -0.620492 0.329298 0.324383 -0.764031 0.557703 0.589480 -0.806575 -0.044157 0.810019 -0.567781 0.146608 0.222963 -0.776965 0.588739 0.589480 -0.806575 -0.044157 0.693800 -0.542250 0.473926 0.082675 -0.621088 0.779368 0.913973 -0.294110 -0.279557 -0.551940 -0.628068 -0.548537 0.001127 -0.722852 0.691002 0.726776 -0.670086 0.150934 -0.101604 -0.661031 0.743448 0.857670 -0.493791 0.143436 -0.863387 -0.363557 -0.349840 0.001127 -0.722852 0.691002 0.222963 -0.776965 0.588739 -0.645833 -0.762335 0.041779 -0.289127 -0.943424 0.162347 0.589480 -0.806575 -0.044157 0.559229 -0.776272 -0.290971 0.834304 -0.543034 0.095135 0.082675 -0.621088 0.779368 0.200288 -0.495901 0.844966 0.847373 -0.413155 0.333558 -0.550402 0.556937 -0.621995 0.759039 0.609835 -0.227949 0.674930 -0.523514 0.520003 0.621115 -0.547896 0.560380 0.857670 -0.493791 0.143436 0.674930 -0.523514 0.520003 -0.606573 -0.443237 0.660008 0.301529 -0.835103 
-0.460090 0.559229 -0.776272 -0.290971 0.739070 -0.634454 -0.226370 -0.024183 -0.317875 0.947824 0.200288 -0.495901 0.844966 0.357018 -0.295414 0.886154 0.621115 -0.547896 0.560380 0.593483 -0.524319 0.610629 -0.749957 -0.651262 0.115856 0.366397 -0.794050 -0.485013 0.451880 -0.125990 0.883137 -0.024183 -0.317875 0.947824 -0.024183 -0.317875 0.947824 0.082675 -0.621088 0.779368 0.451880 -0.125990 0.883137 0.357018 -0.295414 0.886154 -0.046177 -0.651909 -0.756890 -0.116690 -0.215704 0.969461 0.451880 -0.125990 0.883137 -0.024183 -0.317875 0.947824 -0.117621 -0.355166 0.927374 0.451880 -0.125990 0.883137 -0.005634 0.593529 0.804793 -0.116690 -0.215704 0.969461 -0.483908 -0.382256 -0.787219 0.562624 -0.363796 0.742365 0.675412 0.239844 0.697347 0.451880 -0.125990 0.883137 0.810020 -0.462838 0.360066 + + + + + + + + + + 1.527478 1.303994 0.830254 0.895200 1.452184 0.741087 0.482068 2.868077 0.225741 1.445017 0.779754 2.052803 0.683353 0.150491 0.931392 -0.086609 1.296882 -0.037840 1.972089 2.026078 1.331730 1.604843 1.022334 0.986386 2.075662 1.316896 1.495518 0.696606 2.224683 0.657931 0.805779 -0.222096 1.227199 -0.094111 1.649408 1.824429 1.203025 1.666812 2.230409 0.832593 1.501146 0.869113 1.948762 0.271703 2.841004 2.507566 2.020977 1.955881 2.311093 1.352319 0.764853 -0.467467 1.198517 -0.202469 1.777292 2.040753 1.453463 1.844726 1.150220 1.831923 2.166043 0.418858 2.948616 0.987779 2.380068 1.012521 2.599765 2.799368 1.895441 2.729604 1.825680 2.177248 2.584543 2.081921 2.397561 2.717347 1.849592 1.571650 0.637837 -0.570036 1.072781 2.260137 2.840543 0.389505 1.643832 2.732802 1.880632 2.125759 1.963863 2.676005 2.676972 2.062649 2.497506 2.700286 2.675841 2.062060 -0.626512 0.602840 -0.719519 -0.150619 -0.368935 0.468044 1.824572 2.578731 1.121743 2.211863 2.015162 1.953594 1.491845 2.109824 1.261300 2.058118 0.463755 2.413955 0.253642 1.828477 0.652954 1.155026 2.175341 0.751151 2.704798 0.223570 2.849957 0.724978 3.338023 -0.515361 2.565150 0.582365 2.350444 0.017497 2.095418 2.287157 2.043235 2.700653 1.780300 2.369309 3.553941 2.518572 2.497506 2.700286 2.676972 2.062649 -3.046598 1.333943 -2.912767 0.705612 -3.045519 1.333253 3.485296 1.338239 3.028967 1.333346 3.027865 1.332698 -0.655778 0.513913 -0.647216 0.690811 -0.974084 2.729088 -1.307625 2.601667 -0.823709 1.912565 -0.790193 1.126228 2.358757 0.114221 2.671236 -0.055994 2.184870 0.516508 2.578641 0.360650 2.804700 0.487828 2.706640 0.867308 3.270732 0.468437 3.705535 0.358818 2.326106 2.615286 2.055010 2.685089 2.104545 2.271226 3.273563 0.472139 2.348281 0.995722 2.222878 0.688233 3.883130 1.835775 3.028967 1.333346 3.485296 1.338239 3.482281 0.612497 3.028265 1.227443 2.873240 0.605593 3.485693 1.233263 -0.922499 0.328099 -0.706832 2.495751 -0.355271 2.543699 -0.533136 2.891772 -0.772270 0.994699 2.392996 -0.023485 2.059413 0.137778 2.673833 0.360495 2.938543 0.538854 3.400060 0.359315 3.995537 2.752753 2.253144 1.965046 2.512010 1.854639 1.028766 3.845747 0.646357 3.630085 0.681485 3.214872 4.169970 2.464622 2.668494 2.228135 3.632814 1.791255 -0.924994 0.079668 -0.780282 0.849983 1.635155 0.667211 2.104625 0.283627 2.203075 0.671265 2.225593 0.055372 2.994214 0.437079 3.019191 3.130669 2.471447 2.532378 4.344096 2.830541 -2.561968 4.306729 -1.469179 2.912799 -1.338978 3.351502 -0.935688 0.340549 -0.936411 -0.063843 -0.802409 0.782746 -0.869000 0.971753 2.074021 0.163126 3.190827 0.436544 -2.164553 3.597142 -1.590286 3.232698 -1.973657 3.961211 -3.791839 -0.746237 -3.111871 -1.591858 -2.587213 -0.087793 -1.091628 
0.268290 -0.887869 0.693069 0.325550 -0.789460 -0.012768 -1.239155 0.178398 -1.458106 3.015783 2.082966 2.187675 1.880264 2.469261 1.673181 -2.075268 0.098948 -2.415094 -0.645557 -1.121580 -0.359600 -1.111437 0.117734 -0.887062 0.774761 -0.109342 -1.266953 -0.006484 -1.391611 4.769830 1.789229 4.291533 2.442222 4.079894 2.158118 -0.094008 -1.114202 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 0 5 1 5 6 4 6 7 2 2 8 3 7 9 5 8 10 1 9 11 3 7 12 0 0 13 6 10 14 1 5 6 7 6 15 4 6 7 4 6 7 8 6 16 2 2 8 5 8 10 3 7 9 9 8 17 10 8 18 1 9 11 5 8 10 6 11 19 0 12 20 11 13 21 12 14 22 3 15 23 6 11 24 1 5 6 13 6 25 7 6 15 8 6 16 4 6 7 14 6 26 9 8 17 3 7 9 15 8 27 16 8 28 5 8 10 9 8 17 17 8 29 1 9 11 10 8 18 11 16 30 18 17 31 6 11 32 12 14 33 19 18 34 3 3 35 20 19 36 12 14 37 6 11 38 13 6 25 1 5 6 21 20 39 15 8 27 3 7 9 22 21 40 1 9 11 17 8 29 22 21 40 18 17 31 11 16 30 23 22 41 24 23 42 3 3 43 19 18 44 25 24 45 12 14 46 20 19 47 26 25 48 21 20 49 1 5 50 24 23 51 22 26 52 3 27 53 15 8 27 22 21 40 27 8 54 22 21 40 17 8 29 28 8 55 22 21 56 29 28 57 1 29 58 11 30 59 30 31 60 23 22 61 31 32 62 18 17 63 23 22 64 19 33 65 32 34 66 24 23 67 33 35 68 12 14 69 25 24 70 25 36 71 18 37 72 20 38 73 34 39 74 25 40 75 20 41 76 21 20 49 26 25 48 35 25 77 1 5 50 36 25 78 26 25 48 24 23 79 37 42 80 22 26 81 22 21 40 28 8 55 27 8 54 36 25 78 1 5 50 29 28 82 38 43 83 30 44 84 11 30 85 30 31 86 39 45 87 23 22 88 40 45 89 31 32 90 23 22 88 41 46 91 32 47 92 19 33 93 33 35 94 42 48 95 12 14 96 33 35 97 25 40 98 34 49 99 43 50 100 20 41 101 18 37 102 20 41 101 43 50 100 34 39 103 21 20 49 35 25 77 44 25 104 37 42 105 24 23 106 45 51 107 36 25 78 29 28 82 46 25 108 30 44 84 38 43 83 47 43 109 38 43 83 11 30 85 48 52 110 39 45 87 30 31 86 49 45 111 23 22 88 39 45 87 50 45 112 31 32 90 40 45 89 51 45 113 40 45 89 23 22 88 50 45 112 52 53 114 32 47 115 41 54 116 42 48 117 41 55 118 19 56 119 52 57 120 42 58 121 33 35 122 21 20 49 44 25 104 53 25 123 54 25 124 36 25 78 46 25 108 2 59 125 48 52 126 11 30 127 48 52 110 55 43 128 38 43 83 56 45 129 40 45 89 50 45 112 57 60 130 32 47 131 52 61 132 52 53 133 41 62 134 42 58 135 53 25 123 44 25 104 58 25 136 21 20 49 53 25 123 59 25 137 60 25 138 36 25 78 54 25 124 61 25 139 54 25 124 46 25 108 48 52 126 2 59 125 62 59 140 40 45 89 56 45 129 63 45 141 64 63 142 32 47 143 57 60 144 52 64 145 65 65 146 57 66 147 66 25 148 53 25 123 58 25 136 36 25 78 60 25 138 67 25 149 2 59 150 68 67 151 62 59 152 64 63 153 45 68 154 32 47 155 52 53 156 69 69 157 65 65 158 53 25 123 66 25 148 70 25 159 67 25 149 60 25 138 71 25 160 2 59 150 72 67 161 68 67 151 62 59 152 68 67 151 73 67 162 69 69 163 52 70 164 74 71 165 72 67 161 2 59 150 75 67 166

+
+
+
+ + + + 114.117951 37.359904 76.775388 107.694546 42.351577 85.002873 105.749905 26.957888 74.370254 116.563205 52.498861 78.707911 100.047939 29.401647 82.148266 109.094066 27.057456 62.626669 118.909193 53.691724 66.547801 98.202558 20.732014 72.886859 112.408445 34.553032 69.413241 113.747901 38.187917 69.947828 111.953208 34.816004 63.768893 115.914429 45.069050 67.160077 110.694395 27.864809 49.111132 113.612725 38.447619 67.609688 116.546996 47.281681 65.604106 109.324574 23.724036 46.867710 111.924777 33.996776 60.786340 118.390380 51.043245 61.373196 108.736171 20.511952 40.164790 112.131279 31.145260 47.208097 118.938240 47.690070 42.213918 107.864396 18.201068 40.035761 109.771751 23.958656 43.127212 112.953479 34.439872 51.794665 116.576811 46.787522 63.313532 118.890867 49.567487 50.226646 102.864212 16.525217 61.928516 113.271817 37.135795 59.257276 114.979878 42.745707 63.843143 120.074801 51.160788 44.215690 118.802418 50.493208 54.858780 114.278033 37.463870 50.040402 114.642301 41.106911 60.814478 119.693389 50.960673 47.406081 120.698223 55.360552 54.506666 116.377388 40.767154 41.295748 120.502507 53.576490 49.411748 119.308630 52.950100 59.398163 114.941676 37.747970 44.232611 120.574563 55.830188 57.681186 113.948102 36.141816 48.199915 + + + + + + + + + + 0.801846 -0.394010 0.449220 0.522044 -0.360002 0.773220 0.593220 -0.599162 0.537676 0.820324 -0.183873 0.541535 0.522044 -0.360002 0.773220 0.541003 -0.437149 0.718482 0.832036 -0.535717 0.143955 0.953649 -0.291317 0.075422 0.593220 -0.599162 0.537676 0.601066 -0.767876 0.221556 0.935537 -0.331378 -0.122306 0.801846 -0.394010 0.449220 0.938369 -0.345633 -0.001555 0.927318 -0.354775 0.119231 0.886764 -0.432459 0.163181 0.837881 -0.505728 0.205412 + + + + + + + + + + 4.683062 1.434497 4.602871 2.018963 4.006029 1.263641 4.820583 -0.400103 4.193260 0.033347 4.036001 -0.533170 4.640144 1.587065 3.873799 1.406734 3.960480 0.915381 4.666745 1.749991 3.979611 1.633222 4.096436 1.063072 4.666168 0.813439 4.583891 1.398433 3.794209 1.305464 3.995775 1.828709 3.492004 1.757397 4.134572 1.264147 4.015669 1.792582 4.181345 2.141865 3.594752 1.470605 4.215182 1.817944 5.056671 1.656636 4.020608 1.524796 4.586528 1.685684 3.644886 1.025998 3.713179 0.384959 4.070744 1.080173 4.225466 1.707015 4.704832 1.611864 3.488770 0.278554 4.030811 0.938712 4.754971 1.167217 4.969853 0.966546 5.106811 1.211977 3.323418 -0.039365 3.897435 0.294698 4.818627 0.057825 3.196205 -0.045485 3.508286 0.101142 4.071059 0.512238 4.731747 1.058576 4.908071 0.437867 3.934177 0.983914 3.303923 0.950442 3.510908 -0.099179 4.206610 0.866188 4.507919 1.083695 5.006509 0.152769 4.950976 0.657568 4.240911 0.429034 4.422869 0.940046 4.989862 0.304088 5.220019 0.640868 4.438458 0.014276 5.130592 0.399217 5.078485 0.872871 4.266792 0.153571 5.240335 0.791435 4.171240 0.341740 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 0 5 1 1 6 4 5 7 2 2 8 0 0 9 2 2 10 5 6 11 6 7 12 3 3 13 0 0 14 2 8 15 7 9 16 5 6 17 8 10 18 0 11 19 5 6 20 0 11 19 9 10 21 6 7 22 0 11 19 8 10 18 9 10 21 10 12 23 8 10 18 5 6 20 6 7 22 9 10 21 11 10 24 5 6 25 12 13 26 10 12 27 11 10 24 9 10 21 13 10 28 6 7 22 11 10 24 14 12 29 5 6 25 15 13 30 12 13 26 10 12 27 12 13 26 16 13 31 14 12 32 17 13 33 6 7 34 5 6 25 18 13 35 15 13 30 16 13 31 12 13 26 19 13 36 14 12 32 20 13 37 17 13 33 18 13 35 5 6 25 21 14 38 22 13 39 15 13 30 18 13 35 23 13 40 16 13 31 19 13 36 24 13 41 20 13 37 14 12 32 17 13 33 20 13 37 25 13 42 5 6 43 26 15 44 21 14 45 16 13 31 23 13 40 27 13 46 28 13 47 20 13 37 24 13 41 29 13 48 25 13 42 20 13 37 17 13 33 25 13 42 30 13 49 27 13 46 23 13 40 31 13 50 32 13 51 20 13 37 28 13 47 25 13 42 29 13 48 33 13 52 34 13 53 17 13 33 30 13 49 27 13 46 31 13 50 32 13 51 32 13 51 35 13 54 20 13 37 36 13 55 25 13 42 33 13 52 17 13 33 34 13 53 37 13 56 32 13 51 31 13 50 38 13 57 32 13 51 38 13 57 35 13 54 37 13 56 34 13 53 39 13 58 38 13 57 31 13 50 40 13 59

+
+
+
+ + + + 107.141767 30.631176 29.448802 107.369907 29.125167 33.426954 105.924095 26.973994 29.399108 108.784824 31.188269 37.444996 106.953011 25.296993 38.075849 108.442085 31.512466 34.979314 105.976666 24.930303 33.388889 105.479338 23.289467 33.635118 105.383504 21.268858 36.769719 103.891640 17.871034 34.747479 + + + + + + + + + + 0.924702 -0.305590 -0.227026 + + + + + + + + + + 3.693566 0.101617 3.623601 0.293049 3.495029 0.099226 3.747366 0.486401 3.429644 0.516758 3.757682 0.367750 3.395931 0.291217 3.307647 0.303066 3.207280 0.453906 3.016996 0.356594 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 3 0 3 2 0 2 1 0 1 4 0 4 2 0 2 3 0 3 3 0 3 1 0 1 5 0 5 4 0 4 6 0 6 2 0 2 4 0 4 7 0 7 6 0 6 8 0 8 7 0 7 4 0 4 7 0 7 8 0 8 9 0 9

+
+
+
+ + + + 116.370321 55.498912 36.221861 120.333016 56.782217 34.064257 116.940657 56.181395 32.877567 121.079448 56.507448 36.805665 113.921792 54.867886 36.568199 115.853859 54.693590 40.391800 119.508486 55.908894 38.208332 + + + + + + + + + + -0.223215 0.951845 0.210155 + + + + + + + + + + -4.368319 -0.629004 -4.582125 -0.732527 -4.404945 -0.789466 -4.616325 -0.600992 -4.238113 -0.612386 -4.332949 -0.428926 -4.530508 -0.533691 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 3 0 3 0 0 0 4 0 4 3 0 3 4 0 4 5 0 5 3 0 3 5 0 5 6 0 6

+
+
+
+ + + + 66.803780 -0.485083 31.477597 69.831509 -0.359714 30.933460 68.876052 -0.360603 30.816341 71.333498 -0.600459 32.926043 65.876956 -0.749053 33.329016 73.195371 -0.748956 34.276284 66.821874 -0.974191 35.132882 79.874495 -1.232273 38.751152 75.586561 -0.600710 33.478840 69.368198 -1.062120 36.119422 80.724185 -0.475882 33.212055 69.707674 -1.383229 38.561621 79.700258 -0.215342 31.133556 67.114159 -1.722657 40.760713 75.570262 -0.006069 29.035609 68.519714 -1.400192 38.534425 77.108529 -1.545283 40.730599 78.598659 0.158001 28.202521 71.048304 -1.746674 41.449698 77.331691 0.176440 27.900693 67.303482 -1.901737 42.122709 75.289282 -2.060925 44.346056 70.887554 -1.764907 41.565048 73.028355 -1.967624 43.356364 68.249068 -2.088175 43.637623 74.212305 -2.182866 45.117272 + + + + + + + + + + -0.017067 0.984011 0.177288 + + + + + + + + + + -0.978458 -0.426380 -1.134495 -0.452358 -1.085289 -0.457950 -1.211631 -0.357226 -0.930491 -0.337987 -1.307384 -0.292761 -0.978953 -0.251865 -1.650921 -0.079117 -1.430660 -0.330834 -1.110008 -0.204764 -1.695355 -0.343571 -1.127204 -0.088166 -1.642857 -0.442805 -0.993337 0.016826 -1.430352 -0.542968 -1.066010 -0.089464 -1.508197 0.015388 -1.586459 -0.582742 -1.195920 0.049720 -1.521227 -0.597152 -1.002927 0.081852 -1.414047 0.188002 -1.187626 0.055227 -1.297694 0.140750 -1.051457 0.154179 -1.358474 0.224822 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 3 0 3 0 0 0 4 0 4 3 0 3 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6 7 0 7 5 0 5 6 0 6 7 0 7 8 0 8 5 0 5 7 0 7 6 0 6 9 0 9 10 0 10 8 0 8 7 0 7 7 0 7 9 0 9 11 0 11 12 0 12 8 0 8 10 0 10 11 0 11 13 0 13 7 0 7 12 0 12 14 0 14 8 0 8 13 0 13 11 0 11 15 0 15 7 0 7 13 0 13 16 0 16 17 0 17 14 0 14 12 0 12 16 0 16 13 0 13 18 0 18 14 0 14 17 0 17 19 0 19 20 0 20 18 0 18 13 0 13 16 0 16 18 0 18 21 0 21 18 0 18 20 0 20 22 0 22 21 0 21 18 0 18 23 0 23 22 0 22 20 0 20 24 0 24 21 0 21 23 0 23 25 0 25

+
+
+
+ + + + 67.984184 12.991249 91.707145 64.447957 10.665522 87.485841 69.815777 11.487551 84.633561 + + + + + + + + + + 0.275967 -0.894396 0.351992 + + + + + + + + + + 1.555328 3.012838 1.345965 2.803482 1.622639 2.662024 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2

+
+
+
+ + + + 110.146327 44.288413 41.796200 109.411277 45.625510 36.355329 108.847467 44.928672 32.592294 110.095252 46.850497 40.829957 109.640554 42.573958 38.680012 109.342553 47.883380 35.338564 108.555341 45.472416 30.427022 109.815971 49.040111 38.362129 108.288748 42.301577 29.324540 108.945475 50.772748 31.883155 108.507338 40.490278 31.279305 108.450389 47.818040 29.136885 108.931682 40.088892 34.332055 108.289550 49.200322 27.686763 109.677852 39.062183 39.776483 108.584912 52.985763 28.843322 109.121070 36.112349 36.599104 108.852575 52.732581 30.768903 + + + + + + + + + + -0.981311 0.033544 0.189483 + + + + + + + + + + -3.697656 0.795219 -3.765192 0.535004 -3.728329 0.355033 -3.829454 0.749008 -3.608511 0.646184 -3.881299 0.486376 -3.755805 0.251476 -3.941677 0.630981 -3.592111 0.198749 -4.029336 0.321117 -3.499255 0.292238 -3.876365 0.189774 -3.479340 0.438239 -3.947238 0.120420 -3.427801 0.698624 -4.142620 0.175734 -3.274973 0.546663 -4.130058 0.267827 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 2 0 2 4 0 4 0 0 0 1 0 1 3 0 3 5 0 5 6 0 6 4 0 4 2 0 2 5 0 5 3 0 3 7 0 7 8 0 8 4 0 4 6 0 6 6 0 6 2 0 2 9 0 9 10 0 10 4 0 4 8 0 8 6 0 6 9 0 9 11 0 11 12 0 12 4 0 4 10 0 10 11 0 11 9 0 9 13 0 13 12 0 12 14 0 14 4 0 4 13 0 13 9 0 9 15 0 15 14 0 14 12 0 12 16 0 16 15 0 15 9 0 9 17 0 17

+
+
+
+ + + + 97.431950 7.206922 34.800252 98.560938 10.052783 30.460357 96.251359 6.625211 31.715020 98.646851 8.273090 36.486409 100.200183 10.717566 35.199264 99.498787 10.494879 32.974943 101.517064 12.942633 33.621852 + + + + + + + + + + -0.770789 0.586840 0.247997 + + + + + + + + + + -2.872304 0.164487 -3.024156 -0.045185 -2.811629 0.015431 -2.953903 0.245950 -3.102546 0.183765 -3.071535 0.076302 -3.234820 0.107556 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 1 0 1 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6

+
+
+
+ + + + 105.542641 42.923961 89.977896 98.523803 32.694967 92.458346 100.047939 29.401647 82.148266 + + + + + + + + + + 0.796976 -0.481032 0.365292 0.803550 -0.471968 0.362704 0.847373 -0.413155 0.333558 + + + + + + + + + + 4.539313 2.162812 3.901431 2.286348 3.796771 1.772865 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2

+
+
+
+ + + + 19.969732 72.774171 34.037392 18.811464 76.713415 32.827590 19.195562 75.706315 32.487517 20.035106 71.476211 36.770406 18.037393 78.716848 33.577754 16.679110 83.242101 32.392504 16.074765 85.745200 30.652055 18.864721 73.095454 41.397581 13.588869 94.714309 26.780606 15.981475 84.589440 34.203892 18.001720 79.824801 31.096237 14.994965 89.018107 30.513847 16.428086 81.797227 37.824832 15.144124 90.671594 25.316573 13.441235 93.319257 31.326377 15.385467 86.779564 33.177266 15.705440 84.083489 37.494753 18.845931 78.194378 28.904264 14.336764 90.437724 31.855112 17.698102 76.291317 42.091049 15.633938 89.858263 23.716171 14.257897 90.293167 32.795358 15.016143 85.117854 40.019963 20.024873 74.635251 29.019777 13.270580 93.250906 32.755320 15.303544 80.714425 48.807638 20.351048 74.646432 26.584574 14.687833 88.126903 34.988654 14.197956 87.885509 39.202484 16.065828 90.063212 20.020638 12.291287 94.633737 36.557713 13.579189 89.377405 40.073626 18.538890 81.396061 23.238747 13.815790 89.811077 37.252889 12.691057 91.650905 40.996624 20.350612 75.818974 23.682947 16.975414 87.134829 20.561680 12.130766 92.335057 43.437246 19.518618 78.840688 22.337981 17.287123 86.532826 19.752337 14.271177 84.162358 47.885714 18.317661 82.995022 20.910405 12.329744 90.646424 46.151975 18.203724 84.497306 18.029635 12.144644 87.911247 54.294292 11.176345 90.570587 54.853139 + + + + + + + + + + -0.982822 -0.039702 -0.180233 + + + + + + + + + + 0.692621 0.705234 0.436466 0.726125 0.496647 0.680620 0.796101 0.842734 0.317351 0.821203 0.024762 0.857851 -0.145947 0.811025 0.732690 1.168272 -0.738302 0.789509 -0.044664 1.002624 0.227804 0.694641 -0.351686 0.882003 0.159489 1.158735 -0.497533 0.600726 -0.613936 1.036769 -0.189972 0.992344 0.013863 1.193872 0.311817 0.519450 -0.429472 0.999455 0.538565 1.288818 -0.459767 0.481978 -0.412751 1.054097 -0.030163 1.375191 0.535264 0.440114 -0.597984 1.123489 0.316944 1.811717 0.514662 0.289767 -0.259398 1.137117 -0.209868 1.391879 -0.502783 0.258387 -0.653356 1.392270 -0.296015 1.482010 0.065356 0.246831 -0.346181 1.318084 -0.430602 1.594341 0.417644 0.138791 -0.315290 0.220696 -0.453425 1.761918 0.217746 0.129027 -0.284270 0.156010 0.093858 1.838475 -0.053635 0.141674 -0.325671 1.888794 -0.171096 0.000000 -0.088131 2.325940 -0.249815 2.425123 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 5 0 5 4 0 4 3 0 3 6 0 6 4 0 4 5 0 5 5 0 5 3 0 3 7 0 7 8 0 8 4 0 4 6 0 6 5 0 5 7 0 7 9 0 9 8 0 8 10 0 10 4 0 4 11 0 11 8 0 8 6 0 6 9 0 9 7 0 7 12 0 12 13 0 13 10 0 10 8 0 8 8 0 8 11 0 11 14 0 14 15 0 15 11 0 11 6 0 6 12 0 12 7 0 7 16 0 16 13 0 13 17 0 17 10 0 10 14 0 14 11 0 11 18 0 18 16 0 16 7 0 7 19 0 19 20 0 20 17 0 17 13 0 13 14 0 14 18 0 18 21 0 21 16 0 16 19 0 19 22 0 22 20 0 20 23 0 23 17 0 17 14 0 14 21 0 21 24 0 24 22 0 22 19 0 19 25 0 25 20 0 20 26 0 26 23 0 23 24 0 24 21 0 21 27 0 27 22 0 22 25 0 25 28 0 28 29 0 29 26 0 26 20 0 20 24 0 24 27 0 27 30 0 30 28 0 28 25 0 25 31 0 31 32 0 32 26 0 26 29 0 29 30 0 30 27 0 27 33 0 33 31 0 31 25 0 25 34 0 34 32 0 32 35 0 35 26 0 26 36 0 36 32 0 32 29 0 29 30 0 30 33 0 33 31 0 31 34 0 34 25 0 25 37 0 37 30 0 30 31 0 31 34 0 34 35 0 35 32 0 32 38 0 38 39 0 39 32 0 32 36 0 36 37 0 37 25 0 25 40 0 40 39 0 39 41 0 41 32 0 32 37 0 37 40 0 40 42 0 42 41 0 41 39 0 39 43 0 43 42 0 42 40 0 40 44 0 44 42 0 42 44 0 44 45 0 45

+
+
+
+ + + + 36.684380 100.547373 32.636359 40.212398 99.623566 34.268862 39.364844 99.685927 31.813691 35.533489 100.960427 33.547847 35.091097 101.282881 36.014321 38.934269 100.221930 37.086493 36.791487 100.913632 37.783643 42.172985 99.379396 38.656481 35.357203 101.514734 40.035780 43.327347 99.185976 40.597859 31.085895 102.998829 42.786708 32.414105 102.363961 39.689854 32.880540 102.449797 42.594420 40.588048 100.108820 41.987905 36.807182 101.328006 43.201220 39.351655 100.676763 44.572849 34.365939 102.144684 44.365627 36.420781 101.720731 46.791634 33.781647 102.570566 47.623234 35.476128 102.094543 47.989392 + + + + + + + + + + 0.284491 0.955824 -0.073932 + + + + + + + + + + -0.093458 -0.492105 -0.256792 -0.414166 -0.219594 -0.531382 -0.038749 -0.448589 -0.015626 -0.330834 -0.194249 -0.279645 -0.093376 -0.246362 -0.344119 -0.204691 -0.024091 -0.138839 -0.396170 -0.112005 0.178329 -0.007504 0.113168 -0.155355 0.094232 -0.016684 -0.266723 -0.045640 -0.088753 0.012286 -0.206361 0.077771 0.026535 0.067878 -0.067138 0.183701 0.057074 0.223404 -0.021787 0.240885 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 1 0 1 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6 5 0 5 6 0 6 7 0 7 7 0 7 6 0 6 8 0 8 7 0 7 8 0 8 9 0 9 8 0 8 10 0 10 9 0 9 10 0 10 8 0 8 11 0 11 9 0 9 10 0 10 12 0 12 9 0 9 12 0 12 13 0 13 14 0 14 13 0 13 12 0 12 13 0 13 14 0 14 15 0 15 16 0 16 14 0 14 12 0 12 14 0 14 16 0 16 17 0 17 17 0 17 16 0 16 18 0 18 17 0 17 18 0 18 19 0 19

+
+
+
+ + + + 17.255901 55.163121 90.987627 35.856079 109.217974 72.721984 35.598486 73.171702 89.155617 30.617218 97.520032 94.680947 19.538780 82.984363 99.026461 24.861514 100.923895 95.947835 + + + + + + + + + + 0.701419 -0.712496 0.018989 0.892266 -0.192562 -0.408389 0.754191 -0.333115 0.565890 0.634312 -0.045361 0.771746 0.368949 -0.285821 0.884411 0.095269 -0.201719 0.974799 + + + + + + + + + + -3.499458 3.381980 -1.366679 1.641546 -2.356405 3.207417 -3.084065 2.549389 -1.484686 1.730840 -1.972995 2.824602 -3.984585 -0.149102 -2.843260 -0.335996 -2.936378 0.670986 -1.745831 -0.770064 -1.841858 -2.219690 -1.577752 -1.040449 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 2 2 3 1 1 4 3 3 5 0 0 6 2 2 7 4 4 8 5 5 9 2 2 10 3 3 11

+
+
+
+ [mesh geometry data omitted: the added 3D asset file contains repeated blocks of float arrays (vertex positions, normals, texture coordinates) followed by triangle index lists; the raw numeric payload is not reproduced here]

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 2 0 2 4 0 4 0 0 0 1 0 1 3 0 3 5 0 5 6 0 6 4 0 4 2 0 2 5 0 5 3 0 3 7 0 7 8 0 8 4 0 4 6 0 6 6 0 6 2 0 2 9 0 9 10 0 10 4 0 4 8 0 8 6 0 6 9 0 9 11 0 11 12 0 12 4 0 4 10 0 10 11 0 11 9 0 9 13 0 13 12 0 12 14 0 14 4 0 4 13 0 13 9 0 9 15 0 15 14 0 14 12 0 12 16 0 16 15 0 15 9 0 9 17 0 17

+
+
+
+ + + + 106.749168 102.437546 45.844309 103.605827 104.019802 41.543977 107.295007 101.026667 42.787203 105.652160 103.937018 47.515095 103.011562 105.964255 46.239683 103.195127 105.138552 44.035640 100.598100 107.698442 44.676652 + + + + + + + + + + -0.658008 -0.721516 0.215500 + + + + + + + + + + -2.872304 0.164487 -3.024156 -0.045185 -2.811629 0.015431 -2.953903 0.245950 -3.102546 0.183765 -3.071535 0.076302 -3.234820 0.107556 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 0 2 1 0 1 0 0 0 3 0 3 1 0 1 3 0 3 4 0 4 1 0 1 4 0 4 5 0 5 5 0 5 4 0 4 6 0 6

+
+
+
+ + + + 66.681929 115.359015 100.518947 77.678791 106.281922 102.976784 81.604479 107.691708 92.760692 + + + + + + + + + + 0.559386 0.764462 0.320446 0.550654 0.771748 0.318096 0.493674 0.819248 0.291749 + + + + + + + + + + 4.539313 2.162812 3.901431 2.286348 3.796771 1.772865 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2

+
+
+
+ + + + 45.919107 50.809989 9.533957 42.101423 30.593759 26.416155 40.015878 29.589494 25.738405 49.279545 52.428161 10.626016 35.963940 8.960123 73.791827 44.690406 54.079574 10.715803 46.901523 55.171273 11.445404 35.893897 5.479053 82.881069 39.253325 31.618654 26.471878 40.625582 32.296181 26.924681 35.216095 5.152667 82.660800 41.117529 31.728707 26.755173 34.968266 5.812143 82.899179 35.414249 6.032340 83.046340 38.001514 19.071521 55.183680 + + + + + + + + + + -0.259036 -0.539642 -0.801053 0.972960 -0.225066 0.051908 -0.312011 -0.713484 -0.627367 0.963462 -0.252859 0.088336 -0.347106 -0.850806 -0.394520 -0.312011 -0.713484 -0.627367 -0.946998 0.303362 0.105667 0.071781 0.545404 0.835094 0.706882 -0.393430 0.587819 -0.942336 0.321181 0.094047 -0.312011 -0.713484 -0.627367 -0.942336 0.321181 0.094047 0.122498 0.737525 0.664117 -0.346413 -0.903432 0.252604 0.708294 0.452075 0.542169 0.706882 -0.393430 0.587819 0.972960 -0.225066 0.051908 -0.827821 0.075835 0.555842 -0.942336 0.321181 0.094047 0.074598 0.526949 0.846616 0.074598 0.526949 0.846616 -0.942336 0.321181 0.094047 -0.827821 0.075835 0.555842 0.167427 0.886882 0.430590 0.122498 0.737525 0.664117 + + + + + + + + + + 9.894464 -0.561230 7.054804 1.386890 6.463589 1.352369 10.847089 -0.505606 6.968238 1.677679 4.653924 5.858888 6.375155 1.646735 -2.929865 0.702759 -7.363595 -1.162880 -6.450158 -1.193575 3.738572 -2.155933 0.509223 -0.038619 2.812703 -2.169794 4.528998 6.646361 -1.128193 5.695908 -3.292614 1.495832 -2.725176 1.478579 -3.496759 0.721809 1.083832 -0.030016 4.336246 6.636304 0.737741 1.315313 -0.742800 6.329665 0.354525 1.310701 -1.017863 6.479886 -7.021228 1.274778 -10.200669 -0.694304 -6.630366 1.251313 -10.830465 -0.656496 0.892296 -0.032884 1.508266 2.502193 1.615025 2.430738 1.690925 2.489246 -0.833446 6.474279 0.929349 1.317618 -4.498558 6.626442 -6.503163 1.629021 -4.371035 6.619743 -5.688586 4.155612 -6.895541 1.649634 1.459219 2.463319 0.181439 3.842471 -0.555982 6.331913 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 1 1 1 0 0 0 3 3 3 1 1 4 4 4 5 2 5 6 2 2 7 5 6 8 0 0 9 6 7 10 1 1 11 3 3 12 7 8 13 4 4 5 1 1 4 4 4 14 8 9 15 2 10 16 5 6 8 2 2 7 8 11 17 1 1 11 6 7 10 9 12 18 4 4 5 7 8 13 10 13 19 11 14 20 7 15 21 1 16 22 8 9 15 4 4 14 12 17 23 9 12 24 5 6 25 8 18 26 5 6 25 9 12 24 6 7 27 1 1 11 9 12 18 11 14 28 13 19 29 10 13 30 7 15 31 12 17 23 4 4 14 10 13 32 7 15 21 11 14 20 9 12 33 13 20 34 8 21 35 12 22 36 8 21 35 14 23 37 9 24 38 10 13 30 13 19 29 12 17 39 7 15 21 14 23 40 13 20 41 7 15 21 9 12 33 14 23 40 8 21 35 13 20 34 14 23 37

+
+
+
+ + + + 28.776764 -10.621623 87.289720 36.479705 9.222224 74.476451 35.628728 9.498977 74.424827 35.963940 8.960123 73.791827 + + + + + + + + + + -0.291762 -0.799063 0.525712 0.874080 -0.026080 0.485081 -0.769962 0.508593 0.385346 -0.291762 -0.799063 0.525712 -0.122349 -0.532444 -0.837577 -0.291762 -0.799063 0.525712 + + + + + + + + + + -2.375746 5.484077 -3.537595 3.507089 -3.317622 3.494743 4.589063 5.895786 0.312103 7.350300 4.424997 5.846547 -2.181370 4.721944 -2.002799 4.678372 1.620038 6.310069 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 1 1 3 0 3 4 3 4 5 2 2 6 3 4 7 0 5 8

+
+
+
+ + + + 38.050824 17.652882 54.718116 36.865600 18.147912 55.480944 38.001514 19.071521 55.183680 55.883381 7.276691 90.621298 + + + + + + + + + + 0.032576 -0.342291 -0.939029 -0.949258 0.067790 -0.307105 -0.008272 0.832903 -0.553357 0.440678 -0.266041 0.857336 -0.949258 0.067790 -0.307105 -0.949258 0.067790 -0.307105 -0.008272 0.832903 -0.553357 + + + + + + + + + + -3.839477 0.747631 -3.977718 0.859990 -4.207723 0.764866 6.766661 7.500425 2.188010 4.494555 2.524293 4.442786 -9.334923 7.073815 -5.668548 3.938338 -5.323260 3.983399 2.597091 7.798740 3.152706 4.448960 3.523782 4.457660 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 0 5 3 3 6 2 2 7 1 5 8 3 3 9 0 0 10 2 6 11

+
+
+
+ + + + 40.183092 45.922117 96.029249 39.402740 20.346536 48.197417 40.188991 21.938086 47.373626 38.244109 21.761109 47.499985 + + + + + + + + + + 0.110182 -0.877230 0.467255 0.915716 -0.360373 0.177751 -0.805516 -0.516999 0.289580 + + + + + + + + + + -2.727400 11.548786 -7.575064 7.446874 -7.117073 7.392662 13.575559 6.129015 14.062923 6.128272 13.882611 10.522474 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 1 2 3 2 3 1 0 4 0 0 5

+
+
+
+ + + + 48.846941 54.575006 4.098767 53.008606 69.714965 25.586938 55.257352 70.552569 25.344137 45.223538 53.225373 4.489992 56.194244 78.981819 77.105034 49.556728 50.986199 4.560637 47.168288 50.069651 4.822282 55.636319 80.265340 86.737188 55.697857 68.325296 25.630782 54.215551 67.756470 25.793163 56.367161 80.537562 86.658277 53.813236 68.409302 25.724421 56.510325 79.813698 86.751437 56.028576 79.628830 86.804211 55.128465 73.734572 56.514121 + + + + + + + + + + 0.406863 0.666878 -0.624289 -0.940833 0.338040 0.023705 0.459333 0.787845 -0.410261 -0.932402 0.355273 0.066385 0.483368 0.862605 -0.149224 0.888628 -0.445771 0.107835 -0.226485 -0.705721 0.671314 -0.719315 0.342399 0.604442 0.883313 -0.459679 0.091941 0.459333 0.787845 -0.410261 0.883313 -0.459679 0.091941 -0.279101 -0.845237 0.455716 0.415926 0.766522 0.489336 -0.811355 -0.462343 0.357691 -0.719315 0.342399 0.604442 -0.940833 0.338040 0.023705 0.746163 -0.312314 0.587963 0.883313 -0.459679 0.091941 -0.228470 -0.690157 0.686647 -0.228470 -0.690157 0.686647 -0.314131 -0.930106 0.190328 -0.228470 -0.690157 0.686647 + + + + + + + + + + 9.894464 -0.561230 7.054804 1.386890 6.463589 1.352369 10.847089 -0.505606 6.968238 1.677679 4.653924 5.858888 6.375155 1.646735 -2.929865 0.702759 -7.363595 -1.162880 -6.450158 -1.193575 3.738572 -2.155933 0.509223 -0.038619 2.812703 -2.169794 4.528998 6.646361 -1.128193 5.695908 -3.292614 1.495832 -2.725176 1.478579 -3.496759 0.721809 1.083832 -0.030016 4.336246 6.636304 0.737741 1.315313 -0.742800 6.329665 0.354525 1.310701 -1.017863 6.479886 -7.021228 1.274778 -10.200669 -0.694304 -6.630366 1.251313 -10.830465 -0.656496 0.892296 -0.032884 1.508266 2.502193 1.615025 2.430738 1.690925 2.489246 -0.833446 6.474279 0.929349 1.317618 -4.498558 6.626442 -6.503163 1.629021 -4.371035 6.619743 -5.688586 4.155612 -6.895541 1.649634 1.459219 2.463319 0.181439 3.842471 -0.555982 6.331913 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 1 1 1 0 0 0 3 3 3 1 1 4 4 4 5 2 2 6 2 2 7 5 5 8 0 0 9 6 6 10 1 1 11 3 3 12 7 7 13 4 4 5 1 1 4 4 4 14 8 8 15 2 9 16 5 5 8 2 2 7 8 10 17 1 1 11 6 6 10 9 11 18 4 4 5 7 7 13 10 12 19 11 13 20 7 14 21 1 15 22 8 8 15 4 4 14 12 16 23 9 11 24 5 5 25 8 17 26 5 5 25 9 11 24 6 6 27 1 1 11 9 11 18 11 13 28 13 18 29 10 12 30 7 14 31 12 16 23 4 4 14 10 12 32 7 14 21 11 13 20 9 11 33 13 19 34 8 10 35 12 16 36 8 10 35 14 20 37 9 11 38 10 12 30 13 18 29 12 16 39 7 14 21 14 20 40 13 21 41 7 14 21 9 11 33 14 20 40 8 10 35 13 19 34 14 20 37

+
+
+
+ + + + 63.990667 93.818430 95.445813 55.578464 78.644043 77.661938 56.392990 78.272747 77.616049 56.194244 78.981819 77.105034 + + + + + + + + + + 0.319462 0.611335 0.724026 -0.914438 0.034319 0.403268 0.659184 -0.683004 0.314614 0.319462 0.611335 0.724026 0.275216 0.686885 -0.672640 0.319462 0.611335 0.724026 + + + + + + + + + + -2.375746 5.484077 -3.537595 3.507089 -3.317622 3.494743 4.589063 5.895786 0.312103 7.350300 4.424997 5.846547 -2.181370 4.721944 -2.002799 4.678372 1.620038 6.310069 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 1 1 3 0 3 4 3 4 5 2 2 6 3 4 7 0 5 8

+
+
+
+ + + + 55.292801 75.214960 56.399388 56.321044 74.402651 57.116737 55.128465 73.734572 56.514121 34.829507 79.502961 92.142507 + + + + + + + + + + 0.112136 0.547706 -0.829122 0.963928 -0.125207 -0.234874 -0.023999 -0.678641 -0.734078 -0.501238 0.122035 0.856661 0.963928 -0.125207 -0.234874 0.963928 -0.125207 -0.234874 -0.023999 -0.678641 -0.734078 + + + + + + + + + + -3.839477 0.747631 -3.977718 0.859990 -4.207723 0.764866 6.766661 7.500425 2.188010 4.494555 2.524293 4.442786 -9.334923 7.073815 -5.668548 3.938338 -5.323260 3.983399 2.597091 7.798740 3.152706 4.448960 3.523782 4.457660 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 0 5 3 3 6 2 2 7 1 5 8 3 3 9 0 0 10 2 6 11

+
+
+
+ + + + 45.343354 38.867442 89.417147 54.385835 74.282524 49.334621 53.521946 73.042185 48.091598 55.447758 72.918692 48.416943 + + + + + + + + + + -0.061909 0.754843 0.652978 -0.883137 0.432128 0.182576 0.821218 0.322837 0.470508 + + + + + + + + + + -2.727400 11.548786 -7.575064 7.446874 -7.117073 7.392662 13.575559 6.129015 14.062923 6.128272 13.882611 10.522474 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 1 2 3 2 3 1 0 4 0 0 5

+
+
+
+ + + + 43.588370 52.934613 12.209768 23.092926 54.725550 29.091966 21.886491 56.701040 28.414216 45.532299 49.751506 13.301827 0.956684 58.684038 76.467638 46.719755 54.481973 13.391614 48.025664 52.390215 14.121214 -2.514132 58.407996 85.556880 23.829881 57.661357 29.147689 24.640350 56.363177 29.600492 -2.906223 59.050030 85.336611 24.124542 55.817301 29.430983 -2.274622 59.362133 85.574990 -2.011219 58.940225 85.722151 11.220457 57.660801 57.859491 + + + + + + + + + + -0.562702 0.204158 -0.801053 -0.127319 -0.990503 0.051908 -0.740945 0.239605 -0.627367 -0.155918 -0.983812 0.088336 -0.881074 0.260888 -0.394520 -0.740945 0.239605 -0.627367 0.207807 0.972446 0.105667 0.549836 -0.017257 0.835094 -0.321278 -0.742462 0.587819 0.226001 0.969577 0.094047 -0.740945 0.239605 -0.627367 0.226001 0.969577 0.094047 0.746045 -0.048642 0.664117 -0.933371 0.254972 0.252604 0.520188 -0.659892 0.542169 -0.127319 -0.990503 0.051908 -0.006759 0.831260 0.555842 0.531753 -0.021893 0.846616 0.531753 -0.021893 0.846616 0.226001 0.969577 0.094047 -0.006759 0.831260 0.555842 0.899126 -0.078514 0.430590 0.746045 -0.048642 0.664117 -0.006759 0.831260 0.555842 + + + + + + + + + + 9.894464 -0.561230 7.054804 1.386890 6.463589 1.352369 10.847089 -0.505606 6.968238 1.677679 4.653924 5.858888 6.375155 1.646735 -2.929865 0.702759 -7.363595 -1.162880 -6.450158 -1.193575 3.738572 -2.155933 0.509223 -0.038619 2.812703 -2.169794 4.528998 6.646361 -1.128193 5.695908 -3.292614 1.495832 -2.725176 1.478579 -3.496759 0.721809 1.083832 -0.030016 4.336246 6.636304 0.737741 1.315313 -0.742800 6.329665 0.354525 1.310701 -1.017863 6.479886 -7.021228 1.274778 -10.200669 -0.694304 -6.630366 1.251313 -10.830465 -0.656496 0.892296 -0.032884 1.508266 2.502193 1.615025 2.430738 1.690925 2.489246 -0.833446 6.474279 0.929349 1.317618 -4.498558 6.626442 -6.503163 1.629021 -4.371035 6.619743 -5.688586 4.155612 -6.895541 1.649634 1.459219 2.463319 0.181439 3.842471 -0.555982 6.331913 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 1 1 1 0 0 0 3 3 3 1 1 4 4 4 5 2 5 6 2 2 7 5 6 8 0 0 9 6 7 10 1 1 11 3 3 12 7 8 13 4 4 5 1 1 4 4 4 14 8 9 15 2 10 16 5 6 8 2 2 7 8 11 17 1 1 11 6 7 10 9 12 18 4 4 5 7 8 13 10 13 19 11 14 20 7 8 21 1 15 22 8 9 15 4 4 14 12 16 23 9 12 24 5 6 25 8 11 26 5 6 25 9 12 24 6 7 27 1 1 11 9 12 18 11 14 28 13 17 29 10 13 30 7 8 31 12 16 23 4 4 14 10 13 32 7 8 21 11 14 20 9 12 33 13 18 34 8 19 35 12 20 36 8 19 35 14 21 37 9 22 38 10 13 30 13 17 29 12 23 39 7 8 21 14 21 40 13 18 41 7 8 21 9 12 33 14 21 40 8 19 35 13 18 34 14 21 37

+
+
+
+ + + + -19.242071 63.890824 89.965531 1.268713 58.196856 77.152261 1.459579 59.071112 77.100638 0.956684 58.684038 76.467638 + + + + + + + + + + -0.824090 0.210956 0.525712 0.060863 -0.872349 0.485081 0.429605 0.816669 0.385346 -0.824090 0.210956 0.525712 -0.541963 0.068862 -0.837577 -0.824090 0.210956 0.525712 + + + + + + + + + + -2.375746 5.484077 -3.537595 3.507089 -3.317622 3.494743 4.589063 5.895786 0.312103 7.350300 4.424997 5.846547 -2.181370 4.721944 -2.002799 4.678372 1.620038 6.310069 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 1 1 3 0 3 4 3 4 5 2 2 6 3 4 7 0 5 8

+
+
+
+ + + + 9.813730 57.470836 57.393926 10.188596 58.699366 58.156755 11.220457 57.660801 57.859491 1.259969 38.695890 93.297108 + + + + + + + + + + -0.337363 -0.066411 -0.939029 -0.026825 0.951297 -0.307105 0.827963 0.090955 -0.553357 -0.220957 -0.464922 0.857336 -0.026825 0.951297 -0.307105 -0.026825 0.951297 -0.307105 0.827963 0.090955 -0.553357 + + + + + + + + + + -3.839477 0.747631 -3.977718 0.859990 -4.207723 0.764866 6.766661 7.500425 2.188010 4.494555 2.524293 4.442786 -9.334923 7.073815 -5.668548 3.938338 -5.323260 3.983399 2.597091 7.798740 3.152706 4.448960 3.523782 4.457660 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 0 5 3 3 6 2 2 7 1 5 8 3 3 9 0 0 10 2 6 11

+
+
+
+ + + + 38.154967 58.156804 98.705059 12.628338 56.393137 50.873228 14.290108 55.768846 50.049437 13.920841 57.686535 50.175796 + + + + + + + + + + -0.861949 -0.196764 0.467255 -0.267643 -0.946980 0.177751 -0.594447 0.750185 0.289580 + + + + + + + + + + -2.727400 11.548786 -7.575064 7.446874 -7.117073 7.392662 13.575559 6.129015 14.062923 6.128272 13.882611 10.522474 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 1 2 3 2 3 1 0 4 0 0 5

+
+
+
+ + + + 52.995919 52.540627 0.266614 45.532299 49.751506 13.301827 49.131204 48.450628 0.266614 48.025664 52.390215 14.121214 43.588370 52.934613 12.209768 50.971760 55.782851 0.266614 46.719755 54.481973 13.391614 46.118114 53.384443 0.266614 + + + + + + + + + + 0.926225 -0.366149 -0.089681 0.076826 -0.826939 0.557019 -0.419658 -0.764719 -0.488971 0.715122 0.177156 0.676178 0.076826 -0.826939 0.557019 0.926225 -0.366149 -0.089681 -0.952977 0.265770 0.145607 -0.419658 -0.764719 -0.488971 0.076826 -0.826939 0.557019 -0.419658 -0.764719 -0.488971 0.231960 0.957835 -0.169551 0.926225 -0.366149 -0.089681 -0.028788 0.799121 0.600481 0.715122 0.177156 0.676178 0.715122 0.177156 0.676178 0.926225 -0.366149 -0.089681 -0.697560 0.209358 -0.685258 -0.952977 0.265770 0.145607 -0.028788 0.799121 0.600481 -0.952977 0.265770 0.145607 -0.028788 0.799121 0.600481 -0.952977 0.265770 0.145607 + + + + + + + + + + 2.349418 -0.084846 0.567966 1.008771 0.948054 -0.084846 1.476910 1.087843 0.574122 1.019589 2.400158 -0.066218 -0.922529 0.918900 0.777816 -0.062831 0.005147 1.008667 -1.380381 0.000000 -1.838751 0.594003 -2.342846 0.331342 -1.628424 0.556990 -0.414924 0.538539 -1.183765 0.706921 0.575167 -0.243422 0.247021 0.931690 -0.376711 -0.243422 -0.720273 -0.075302 0.719449 -0.075302 -0.953022 0.911482 -1.093754 0.314123 -0.630005 0.399702 0.335167 -0.253045 0.720406 0.862593 0.109010 0.924610 -0.494703 0.985333 -2.457411 0.016043 -1.109141 0.016043 -1.347735 1.078342 -2.443408 0.013770 -0.478003 0.982482 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 4 4 0 5 5 4 6 6 2 7 7 1 8 8 2 9 9 5 10 10 0 11 11 6 12 12 1 8 13 3 13 14 5 10 15 3 14 16 0 15 17 7 16 18 2 7 19 4 17 20 1 8 13 6 12 12 4 17 21 5 10 10 2 9 9 7 16 22 5 10 23 6 18 24 3 13 25 4 19 26 5 10 27 7 16 28 6 20 29 5 10 30 4 21 31

+
+
+
+ + + + 53.181810 52.620275 -0.199664 67.862684 51.044060 21.942306 69.234278 49.081312 21.652926 50.971760 55.782851 0.266614 75.218852 47.317594 73.731661 49.857374 51.040209 0.019801 48.374333 53.118176 0.329327 75.877626 47.629967 83.437614 67.171077 48.100697 21.789129 66.250677 49.390318 21.981227 76.323395 46.992074 83.343566 66.788012 49.941565 21.968253 75.652854 46.673374 83.387832 75.353724 47.092501 83.450264 70.834344 48.233296 52.932797 + + + + + + + + + + 0.774335 -0.198254 -0.600917 0.096653 0.991782 0.083830 0.894354 -0.231833 -0.382601 0.113418 0.985391 0.127047 0.960488 -0.251647 -0.118896 0.894354 -0.231833 -0.382601 -0.219871 -0.974569 0.043273 -0.770113 0.011498 0.637804 0.127926 0.745786 0.653788 -0.233901 -0.971890 0.026833 0.894354 -0.231833 -0.382601 -0.233901 -0.971890 0.026833 -0.908063 0.040827 0.416839 0.821237 -0.245184 0.515223 -0.662554 0.654409 0.364378 0.096653 0.991782 0.083830 -0.147725 -0.831144 0.536076 -0.756236 0.016323 0.654095 -0.756236 0.016323 0.654095 -0.233901 -0.971890 0.026833 -0.147725 -0.831144 0.536076 -0.986470 0.069094 0.148671 -0.908063 0.040827 0.416839 -0.756236 0.016323 0.654095 + + + + + + + + + + 9.894464 -0.561230 7.054804 1.386890 6.463589 1.352369 10.847089 -0.505606 6.968238 1.677679 4.653924 5.858888 6.375155 1.646735 -2.929865 0.702759 -7.363595 -1.162880 -6.450158 -1.193575 3.738572 -2.155933 0.509223 -0.038619 2.812703 -2.169794 4.528998 6.646361 -1.128193 5.695908 -3.292614 1.495832 -2.725176 1.478579 -3.496759 0.721809 1.083832 -0.030016 4.336246 6.636304 0.737741 1.315313 -0.742800 6.329665 0.354525 1.310701 -1.017863 6.479886 -7.021228 1.274778 -10.200669 -0.694304 -6.630366 1.251313 -10.830465 -0.656496 0.892296 -0.032884 1.508266 2.502193 1.615025 2.430738 1.690925 2.489246 -0.833446 6.474279 0.929349 1.317618 -4.498558 6.626442 -6.503163 1.629021 -4.371035 6.619743 -5.688586 4.155612 -6.895541 1.649634 1.459219 2.463319 0.181439 3.842471 -0.555982 6.331913 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 1 1 1 0 0 0 3 3 3 1 1 4 4 4 5 2 5 6 2 2 7 5 6 8 0 0 9 6 7 10 1 1 11 3 3 12 7 8 13 4 4 5 1 1 4 4 4 14 8 9 15 2 10 16 5 6 8 2 2 7 8 11 17 1 1 11 6 7 10 9 12 18 4 4 5 7 8 13 10 13 19 11 14 20 7 8 21 1 15 22 8 9 15 4 4 14 12 16 23 9 12 24 5 6 25 8 9 26 5 6 25 9 12 24 6 7 27 1 1 11 9 12 18 11 14 28 13 17 29 10 13 30 7 8 31 12 16 23 4 4 14 10 13 32 7 8 21 11 14 20 9 12 33 13 18 34 8 19 35 12 20 36 8 19 35 14 21 37 9 22 38 10 13 30 13 17 29 12 16 39 7 8 21 14 21 40 13 23 41 7 8 21 9 12 33 14 21 40 8 19 35 13 18 34 14 21 37

+
+
+
+ + + + 90.639698 42.322612 92.560922 74.715430 47.801483 74.293655 74.556762 46.925276 74.191163 75.218852 47.317594 73.731661 + + + + + + + + + + 0.636447 -0.202315 0.744314 -0.208760 0.871664 0.443420 -0.515297 -0.821123 0.245411 0.636447 -0.202315 0.744314 0.763826 -0.063183 -0.642322 0.636447 -0.202315 0.744314 + + + + + + + + + + -2.375746 5.484077 -3.537595 3.507089 -3.317622 3.494743 4.589063 5.895786 0.312103 7.350300 4.424997 5.846547 -2.181370 4.721944 -2.002799 4.678372 1.620038 6.310069 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 1 1 3 0 3 4 3 4 5 2 2 6 3 4 7 0 5 8

+
+
+
+ + + + 72.313746 48.437982 52.898258 71.744552 47.205593 53.521922 70.834344 48.233296 52.932797 69.808209 67.301471 89.675900 + + + + + + + + + + 0.596485 0.069941 -0.799571 0.124967 -0.950964 -0.282931 -0.629045 -0.099620 -0.770959 -0.044026 0.467210 0.883049 0.124967 -0.950964 -0.282931 -0.629045 -0.099620 -0.770959 + + + + + + + + + + -3.839477 0.747631 -3.977718 0.859990 -4.207723 0.764866 6.766661 7.500425 2.188010 4.494555 2.524293 4.442786 -9.334923 7.073815 -5.668548 3.938338 -5.323260 3.983399 2.597091 7.798740 3.152706 4.448960 3.523782 4.457660 + + + + + + + + + + + + + + +

0 0 0 1 1 1 2 2 2 3 3 3 1 1 4 0 0 5 3 3 6 2 2 7 1 4 8 3 3 9 0 0 10 2 5 11

+
+
+
+ + + + 33.141035 47.455268 84.120670 71.517946 49.486147 45.836317 70.163475 50.093002 44.560783 70.498848 48.179286 44.795449 + + + + + + + + + + 0.685658 0.205779 0.698232 0.194481 0.949731 0.245332 0.491288 -0.743918 0.453013 + + + + + + + + + + -2.727400 11.548786 -7.575064 7.446874 -7.117073 7.392662 13.575559 6.129015 14.062923 6.128272 13.882611 10.522474 + + + + + + + + + + + + + + +

0 0 0 1 0 1 2 1 2 3 2 3 1 0 4 0 0 5

+
+
+
+
+ + + + + + 194.384776 + 145.788582 + 1.000000 + 1000.000000 + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + -0.188510 -0.112956 0.975554 392.535546 + 0.982071 -0.021682 0.187259 116.311135 + 0.000000 0.993363 0.115018 109.490705 + 0.000000 0.000000 0.000000 1.000000 + + + + + + + + +
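The interleaved index lists summarized above follow the usual COLLADA <triangles>/<p> layout: each triangle corner contributes one integer per declared <input> (position, normal, texture coordinate in these meshes), so a corner is a fixed-stride group of indices rather than a single index, and three consecutive corners form one triangle. A minimal decoding sketch in Python follows; it uses only the standard library, assumes a well-formed COLLADA 1.4 document, and the helper name read_corner_indices is illustrative, not something defined in this repository.

# Editorial sketch: decode a COLLADA <triangles> block into per-corner index tuples.
# Standard library only; the namespace URI is the COLLADA 1.4 schema namespace.
import xml.etree.ElementTree as ET

COLLADA_NS = {"c": "http://www.collada.org/2005/11/COLLADASchema"}

def read_corner_indices(dae_path):
    """Yield one tuple of indices (e.g. vertex, normal, texcoord) per triangle corner."""
    root = ET.parse(dae_path).getroot()
    for tris in root.iter("{http://www.collada.org/2005/11/COLLADASchema}triangles"):
        inputs = tris.findall("c:input", COLLADA_NS)
        stride = max(int(inp.get("offset")) for inp in inputs) + 1  # indices per corner
        flat = [int(tok) for tok in tris.find("c:p", COLLADA_NS).text.split()]
        for start in range(0, len(flat), stride):
            yield tuple(flat[start:start + stride])

The index triples visible in these files (e.g. "0 0 0 1 0 1 2 0 2") are consistent with a stride of 3, i.e. vertex, normal and texcoord sources each using their own offset.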
diff --git a/wild_visual_navigation_sim/Media/models/PineTree.dae b/wild_visual_navigation_sim/Media/models/PineTree.dae
new file mode 100644
index 00000000..6d24465f
--- /dev/null
+++ b/wild_visual_navigation_sim/Media/models/PineTree.dae
@@ -0,0 +1,1250 @@
[Head of PineTree.dae, markup lost in extraction: a COLLADA asset exported by Google SketchUp 6.4.112 (created and modified 2009-12-14T02:59:44Z, Z_UP). It declares two Phong-style effects, an olive-green one (0.333333 0.466667 0.176471) and a dark grey one (0.137255 0.137255 0.137255), each with a 0.33 grey specular term and shininess 20 along with reflectivity and transparency settings, and then begins the mesh geometry: float arrays of positions, normals and texture coordinates plus interleaved triangle index lists.]

[Body of PineTree.dae omitted, markup lost in extraction. The file continues with the remaining meshes (trunk, branch and needle geometry as float arrays of positions, normals and texture coordinates plus interleaved triangle index lists) and closes with what appear to be the library-node and visual-scene definitions: a long series of 4x4 transform matrices that place instanced copies of the branch and foliage geometry around the trunk. The excerpt is cut off mid-matrix; the hunk header indicates the full file adds 1250 lines.]
0.594293 0.568601 0.568778 131.353008 + 0.000000 0.000000 0.000000 1.000000 + + + + + + 0.728152 -0.677248 -0.105494 123.108411 + 0.682957 0.703871 0.195284 62.601943 + -0.058001 -0.214244 0.975057 102.364040 + 0.000000 0.000000 0.000000 1.000000 + + + + + + -0.865860 0.166444 0.379978 82.415319 + -0.166292 -0.904501 0.036153 153.845647 + 0.262741 -0.023955 1.275096 27.845643 + 0.000000 0.000000 0.000000 1.000000 + + + + + + 0.989877 -0.006651 -0.141771 94.907568 + 0.009131 0.999816 0.016848 58.610041 + 0.141633 -0.017972 0.989756 84.897646 + 0.000000 0.000000 0.000000 1.000000 + + + + + + + + + + + 46.666667 + 35.000000 + 25.400000 + 25400.000000 + + + + + + + + + + + 25.400000 0.000000 0.000000 0.000000 + 0.000000 25.400000 0.000000 0.000000 + 0.000000 0.000000 25.400000 0.000000 + 0.000000 0.000000 0.000000 1.000000 + + + + + + + 0.997541 0.008550 0.069567 3750.183642 + 0.070091 -0.121690 -0.990090 -6979.137638 + -0.000000 0.992531 -0.121990 1761.078803 + 0.000000 0.000000 0.000000 1.000000 + + + + + + + + +
diff --git a/wild_visual_navigation_sim/Media/models/heightmap.png b/wild_visual_navigation_sim/Media/models/heightmap.png
new file mode 100644
index 00000000..9ae5cb96
Binary files /dev/null and b/wild_visual_navigation_sim/Media/models/heightmap.png differ
diff --git a/wild_visual_navigation_sim/Media/models/scenario.dae b/wild_visual_navigation_sim/Media/models/scenario.dae
new file mode 100644
index 00000000..274d0727
--- /dev/null
+++ b/wild_visual_navigation_sim/Media/models/scenario.dae
@@ -0,0 +1,131 @@
+[COLLADA scene exported from SketchUp 13.0.4812 (created/modified 2014-02-25T13:59:49Z, Z_UP axis): a heightfield-style terrain mesh for the simulation scenario. The file carries a root transform translating the mesh by -30.63593 along Z, a vertex-position array laid out on a regular 30-unit x/y grid spanning 0-1200 with per-vertex heights, and the matching per-vertex normal array; markup and full numeric arrays omitted]
-0.1436692 0.03165597 -0.9891193 0.1436692 -0.03165597 0.9891193 -0.1569524 0.03468662 -0.9869969 0.1569524 -0.03468662 0.9869969 -0.1916923 0.0132184 -0.9813661 0.1916923 -0.0132184 0.9813661 0.1113154 -0.1270379 -0.9856319 -0.1113154 0.1270379 0.9856319 0.008327428 -0.05427833 -0.9984911 -0.008327428 0.05427833 0.9984911 -0.08339446 0.03175246 -0.9960106 0.08339446 -0.03175246 0.9960106 -0.1868036 0.1104129 -0.9761728 0.1868036 -0.1104129 0.9761728 -0.2616743 0.14768 -0.9537909 0.2616743 -0.14768 0.9537909 -0.2202977 -0.02286924 -0.9751646 0.2202977 0.02286924 0.9751646 -0.042305 -0.04885918 -0.9979093 0.042305 0.04885918 0.9979093 -0.08292851 -0.003453375 -0.9965495 0.08292851 0.003453375 0.9965495 -0.120488 0.01750666 -0.9925604 0.120488 -0.01750666 0.9925604 -0.1477785 0.01165802 -0.9889518 0.1477785 -0.01165802 0.9889518 0.1223601 -0.1464749 -0.9816176 -0.1223601 0.1464749 0.9816176 0.1264474 -0.1402668 -0.9820062 -0.1264474 0.1402668 0.9820062 0.107473 -0.1323601 -0.985358 -0.107473 0.1323601 0.985358 0.07126759 -0.1225521 -0.9898999 -0.07126759 0.1225521 0.9898999 -0.2892829 0.1328829 -0.9479755 0.2892829 -0.1328829 0.9479755 -0.277531 0.0887586 -0.9566078 0.277531 -0.0887586 0.9566078 -0.2552052 0.02865194 -0.9664623 0.2552052 -0.02865194 0.9664623 -0.2254774 -0.05970058 -0.9724175 0.2254774 0.05970058 0.9724175 -0.01178203 -0.104669 -0.9944373 0.01178203 0.104669 0.9944373 0.03090886 -0.1159752 -0.9927711 -0.03090886 0.1159752 0.9927711 0.06441557 -0.1306224 -0.9893374 -0.06441557 0.1306224 0.9893374 0.08649674 -0.1441386 -0.9857699 -0.08649674 0.1441386 0.9857699 -0.1501359 -0.008050399 -0.9886326 0.1501359 0.008050399 0.9886326 -0.1403387 -0.02703426 -0.9897344 0.1403387 0.02703426 0.9897344 -0.1518635 -0.07208219 -0.9857696 0.1518635 0.07208219 0.9857696 -0.1727706 -0.126873 -0.9767567 0.1727706 0.126873 0.9767567 0.09637685 -0.1514015 -0.9837627 -0.09637685 0.1514015 0.9837627 0.024324 -0.1105829 -0.9935692 -0.024324 0.1105829 0.9935692 -0.05555833 -0.03306097 -0.9979079 0.05555833 0.03306097 0.9979079 -0.1593121 0.06266953 -0.9852371 0.1593121 -0.06266953 0.9852371 -0.2440589 0.1329862 -0.9605987 0.2440589 -0.1329862 0.9605987 -0.286506 0.1578572 -0.9449843 0.286506 -0.1578572 0.9449843 -0.187862 -0.1739342 -0.966672 0.187862 0.1739342 0.966672 -0.06002463 -0.09959897 -0.9932155 0.06002463 0.09959897 0.9932155 -0.09321096 -0.0520337 -0.9942858 0.09321096 0.0520337 0.9942858 -0.1273216 -0.02307803 -0.991593 0.1273216 0.02307803 0.991593 -0.1541052 -0.01406403 -0.9879543 0.1541052 0.01406403 0.9879543 -0.1641552 -0.01134025 -0.9863693 0.1641552 0.01134025 0.9863693 0.09458427 -0.1475221 -0.9845258 -0.09458427 0.1475221 0.9845258 0.1216281 -0.1538425 -0.980581 -0.1216281 0.1538425 0.980581 0.1277724 -0.1586432 -0.9790335 -0.1277724 0.1586432 0.9790335 0.1117009 -0.1606538 -0.9806698 -0.1117009 0.1606538 0.9806698 0.07887129 -0.1586485 -0.9841798 -0.07887129 0.1586485 0.9841798 -0.275542 0.1249741 -0.9531307 0.275542 -0.1249741 0.9531307 -0.2594777 0.06622681 -0.9634757 0.2594777 -0.06622681 0.9634757 -0.2334775 -0.001121242 -0.9723616 0.2334775 0.001121242 0.9723616 -0.20838 -0.08706978 -0.9741646 0.20838 0.08706978 0.9741646 -0.1907763 -0.2005876 -0.9609209 0.1907763 0.2005876 0.9609209 -0.03895416 -0.1465274 -0.9884393 0.03895416 0.1465274 0.9884393 -0.004390542 -0.1518585 -0.9883925 0.004390542 0.1518585 0.9883925 0.02488452 -0.157156 -0.9872602 -0.02488452 0.157156 0.9872602 0.04468143 -0.1595489 -0.9861783 -0.04468143 0.1595489 0.9861783 0.05369019 -0.1555717 -0.9863644 
-0.05369019 0.1555717 0.9863644 -0.1286951 -0.03806936 -0.9909532 0.1286951 0.03806936 0.9909532 -0.09203988 -0.08797085 -0.9918618 0.09203988 0.08797085 0.9918618 -0.08666034 -0.1525617 -0.9844871 0.08666034 0.1525617 0.9844871 -0.1122561 -0.2185926 -0.9693378 0.1122561 0.2185926 0.9693378 -0.1441695 -0.26848 -0.9524356 0.1441695 0.26848 0.9524356 0.05351056 -0.1418416 -0.988442 -0.05351056 0.1418416 0.988442 0.03627991 -0.1518042 -0.9877445 -0.03627991 0.1518042 0.9877445 -0.02878661 -0.09289356 -0.9952598 0.02878661 0.09289356 0.9952598 -0.1180231 0.003576532 -0.9930044 0.1180231 -0.003576532 0.9930044 -0.2041702 0.1049674 -0.9732915 0.2041702 -0.1049674 0.9732915 -0.2576332 0.1708887 -0.9510111 0.2576332 -0.1708887 0.9510111 -0.2655311 0.1782735 -0.9474765 0.2655311 -0.1782735 0.9474765 -0.1698541 -0.2960252 -0.9399567 0.1698541 0.2960252 0.9399567 -0.07091643 -0.1383162 -0.9878459 0.07091643 0.1383162 0.9878459 -0.1034877 -0.09686252 -0.989903 0.1034877 0.09686252 0.989903 -0.1351851 -0.06039508 -0.988978 0.1351851 0.06039508 0.988978 -0.1580902 -0.03746115 -0.9867138 0.1580902 0.03746115 0.9867138 -0.1687263 -0.01947005 -0.9854706 0.1687263 0.01947005 0.9854706 -0.1597348 -0.0081843 -0.987126 0.1597348 0.0081843 0.987126 0.04820235 -0.1157885 -0.9921036 -0.04820235 0.1157885 0.9921036 0.08269073 -0.1286248 -0.9882398 -0.08269073 0.1286248 0.9882398 0.1106763 -0.1443493 -0.9833179 -0.1106763 0.1443493 0.9833179 0.1195078 -0.1583851 -0.9801184 -0.1195078 0.1583851 0.9801184 0.1078026 -0.1687114 -0.9797525 -0.1078026 0.1687114 0.9797525 0.08051388 -0.1733794 -0.9815585 -0.08051388 0.1733794 0.9815585 -0.2267409 0.1242892 -0.9659921 0.2267409 -0.1242892 0.9659921 -0.2197042 0.04084297 -0.9747112 0.2197042 -0.04084297 0.9747112 -0.1962091 -0.02648488 -0.9802043 0.1962091 0.02648488 0.9802043 -0.183647 -0.09870613 -0.978024 0.183647 0.09870613 0.978024 -0.1861445 -0.2029884 -0.9613251 0.1861445 0.2029884 0.9613251 -0.1894312 -0.3002073 -0.9348751 0.1894312 0.3002073 0.9348751 -0.03933424 -0.1700066 -0.9846576 0.03933424 0.1700066 0.9846576 -0.02143496 -0.1773578 -0.983913 0.02143496 0.1773578 0.983913 -0.002895968 -0.1784674 -0.9839416 0.002895968 0.1784674 0.9839416 0.01246262 -0.1730167 -0.98484 -0.01246262 0.1730167 0.98484 0.02272897 -0.1607766 -0.9867291 -0.02272897 0.1607766 0.9867291 0.02823619 -0.1416802 -0.9895097 -0.02823619 0.1416802 0.9895097 -0.07209583 -0.07015924 -0.9949271 0.07209583 0.07015924 0.9949271 -0.02617937 -0.1402304 -0.9897727 0.02617937 0.1402304 0.9897727 -0.01820379 -0.2058699 -0.97841 0.01820379 0.2058699 0.97841 -0.04786654 -0.2652634 -0.9629871 0.04786654 0.2652634 0.9629871 -0.0971846 -0.3141193 -0.9443962 0.0971846 0.3141193 0.9443962 -0.1466761 -0.3433649 -0.9276781 0.1466761 0.3433649 0.9276781 0.03143816 -0.115917 -0.9927613 -0.03143816 0.115917 0.9927613 0.04459217 -0.1718888 -0.9841066 -0.04459217 0.1718888 0.9841066 -0.002773833 -0.1363961 -0.9906505 0.002773833 0.1363961 0.9906505 -0.06535267 -0.05394734 -0.9964029 0.06535267 0.05394734 0.9964029 -0.143447 0.06493568 -0.9875254 0.143447 -0.06493568 0.9875254 -0.2051498 0.1658196 -0.9645815 0.2051498 -0.1658196 0.9645815 -0.2311001 0.2160381 -0.9486413 0.2311001 -0.2160381 0.9486413 -0.2087826 0.2042738 -0.9563901 0.2087826 -0.2042738 0.9563901 -0.1915713 -0.3488048 -0.917407 0.1915713 0.3488048 0.917407 -0.05326443 -0.1552227 -0.9864425 0.05326443 0.1552227 0.9864425 -0.09248182 -0.1213463 -0.9882926 0.09248182 0.1213463 0.9882926 -0.1287748 -0.08337758 -0.9881626 0.1287748 0.08337758 0.9881626 
-0.1533417 -0.05105578 -0.9868534 0.1533417 0.05105578 0.9868534 -0.1663189 -0.02405637 -0.9857785 0.1663189 0.02405637 0.9857785 -0.1704003 -0.0006334557 -0.9853747 0.1704003 0.0006334557 0.9853747 -0.1290714 -0.01147705 -0.9915689 0.1290714 0.01147705 0.9915689 0.03634671 -0.0840874 -0.9957953 -0.03634671 0.0840874 0.9957953 0.04191471 -0.08109786 -0.9958244 -0.04191471 0.08109786 0.9958244 0.06471449 -0.09413699 -0.9934537 -0.06471449 0.09413699 0.9934537 0.09170325 -0.1150685 -0.9891156 -0.09170325 0.1150685 0.9891156 0.103645 -0.1359963 -0.9852729 -0.103645 0.1359963 0.9852729 0.09731585 -0.1527317 -0.9834646 -0.09731585 0.1527317 0.9834646 0.07544879 -0.1654181 -0.9833333 -0.07544879 0.1654181 0.9833333 -0.145452 0.1374521 -0.9797707 0.145452 -0.1374521 0.9797707 -0.1608878 0.03267722 -0.9864316 0.1608878 -0.03267722 0.9864316 -0.161212 -0.04103176 -0.9860665 0.161212 0.04103176 0.9860665 -0.1604105 -0.09428944 -0.9825365 0.1604105 0.09428944 0.9825365 -0.1780326 -0.1848194 -0.9665124 0.1780326 0.1848194 0.9665124 -0.203941 -0.2813089 -0.9376958 0.203941 0.2813089 0.9376958 -0.2298747 -0.3305789 -0.9153553 0.2298747 0.3305789 0.9153553 -0.01475563 -0.1814074 -0.9832973 0.01475563 0.1814074 0.9832973 -0.00488707 -0.1897049 -0.981829 0.00488707 0.1897049 0.981829 0.004822929 -0.1904702 -0.9816811 -0.004822929 0.1904702 0.9816811 0.01418474 -0.1837295 -0.9828745 -0.01418474 0.1837295 0.9828745 0.02301217 -0.1697185 -0.9852239 -0.02301217 0.1697185 0.9852239 0.03111823 -0.1488984 -0.9883627 -0.03111823 0.1488984 0.9883627 0.03831575 -0.1219855 -0.991792 -0.03831575 0.1219855 0.991792 -0.0002234047 -0.09138154 -0.9958159 0.0002234047 0.09138154 0.9958159 0.04550458 -0.1624336 -0.9856696 -0.04550458 0.1624336 0.9856696 0.04488608 -0.2274386 -0.9727574 -0.04488608 0.2274386 0.9727574 0.00693141 -0.2799556 -0.9599879 -0.00693141 0.2799556 0.9599879 -0.05388503 -0.3195067 -0.9460507 0.05388503 0.3195067 0.9460507 -0.1236996 -0.3439613 -0.9308002 0.1236996 0.3439613 0.9308002 -0.1941788 -0.3486718 -0.9169093 0.1941788 0.3486718 0.9169093 0.04442405 -0.08997475 -0.9949528 -0.04442405 0.08997475 0.9949528 0.04504783 -0.1743018 -0.9836613 -0.04504783 0.1743018 0.9836613 0.01946269 -0.1620061 -0.9865978 -0.01946269 0.1620061 0.9865978 -0.0153336 -0.1015323 -0.9947141 0.0153336 0.1015323 0.9947141 -0.07621534 0.009172248 -0.9970492 0.07621534 -0.009172248 0.9970492 -0.1432562 0.1319041 -0.9808562 0.1432562 -0.1319041 0.9808562 -0.1842637 0.2224808 -0.9573658 0.1842637 -0.2224808 0.9573658 -0.1773539 0.2554825 -0.9504074 0.1773539 -0.2554825 0.9504074 -0.124042 0.2292866 -0.9654228 0.124042 -0.2292866 0.9654228 -0.2557507 -0.3311912 -0.9082422 0.2557507 0.3311912 0.9082422 -0.02458346 -0.1657636 -0.9858591 0.02458346 0.1657636 0.9858591 -0.06464095 -0.1331161 -0.9889902 0.06464095 0.1331161 0.9889902 -0.1045863 -0.09460317 -0.990006 0.1045863 0.09460317 0.990006 -0.1374399 -0.05680031 -0.9888802 0.1374399 0.05680031 0.9888802 -0.1569961 -0.02643365 -0.9872454 0.1569961 0.02643365 0.9872454 -0.167552 -0.0003090812 -0.9858632 0.167552 0.0003090812 0.9858632 -0.1633347 0.01596596 -0.9864415 0.1633347 -0.01596596 0.9864415 -0.0755545 -0.0220741 -0.9968973 0.0755545 0.0220741 0.9968973 0.04928165 -0.0541412 -0.9973164 -0.04928165 0.0541412 0.9973164 0.04289988 -0.04890321 -0.9978818 -0.04289988 0.04890321 0.9978818 0.0392763 -0.04412666 -0.9982536 -0.0392763 0.04412666 0.9982536 0.04931032 -0.04796263 -0.9976312 -0.04931032 0.04796263 0.9976312 0.06845718 -0.06693729 -0.995406 -0.06845718 0.06693729 
0.995406 0.0817858 -0.09267359 -0.9923319 -0.0817858 0.09267359 0.9923319 0.07798855 -0.1190982 -0.9898148 -0.07798855 0.1190982 0.9898148 0.05844604 -0.1432245 -0.987963 -0.05844604 0.1432245 0.987963 -0.05885526 0.1617459 -0.9850758 0.05885526 -0.1617459 0.9850758 -0.09293194 0.05622218 -0.9940839 0.09293194 -0.05622218 0.9940839 -0.1218731 -0.03272687 -0.992006 0.1218731 0.03272687 0.992006 -0.132297 -0.08278855 -0.9877467 0.132297 0.08278855 0.9877467 -0.1544248 -0.1509187 -0.97641 0.1544248 0.1509187 0.97641 -0.2007593 -0.2411764 -0.9494891 0.2007593 0.2411764 0.9494891 -0.2480777 -0.2922238 -0.9236139 0.2480777 0.2922238 0.9236139 -0.2929757 -0.295799 -0.909213 0.2929757 0.295799 0.909213 0.01219937 -0.1897758 -0.9817517 -0.01219937 0.1897758 0.9817517 0.01950127 -0.1978677 -0.9800347 -0.01950127 0.1978677 0.9800347 0.02649566 -0.1982799 -0.9797872 -0.02649566 0.1982799 0.9797872 0.03304885 -0.1910482 -0.9810241 -0.03304885 0.1910482 0.9810241 0.03902661 -0.1764105 -0.9835427 -0.03902661 0.1764105 0.9835427 0.04429192 -0.154827 -0.9869482 -0.04429192 0.154827 0.9869482 0.04870737 -0.1270142 -0.9907043 -0.04870737 0.1270142 0.9907043 0.05214293 -0.09397466 -0.9942082 -0.05214293 0.09397466 0.9942082 0.0701857 -0.08449736 -0.9939488 -0.0701857 0.08449736 0.9939488 0.1135034 -0.1484145 -0.98239 -0.1135034 0.1484145 0.98239 0.1058413 -0.2083984 -0.9723002 -0.1058413 0.2083984 0.9723002 0.05454999 -0.2591253 -0.964302 -0.05454999 0.2591253 0.964302 -0.02134944 -0.2949328 -0.9552795 0.02134944 0.2949328 0.9552795 -0.1081562 -0.3137056 -0.9433404 0.1081562 0.3137056 0.9433404 -0.1990444 -0.3142474 -0.9282402 0.1990444 0.3142474 0.9282402 -0.2800606 -0.2965294 -0.9130369 0.2800606 0.2965294 0.9130369 0.05448837 -0.05700255 -0.996886 -0.05448837 0.05700255 0.996886 0.03864261 -0.1591558 -0.9864969 -0.03864261 0.1591558 0.9864969 0.03591469 -0.17005 -0.9847808 -0.03591469 0.17005 0.9847808 0.02333364 -0.1344342 -0.9906478 -0.02333364 0.1344342 0.9906478 -0.01403 -0.05083914 -0.9986083 0.01403 0.05083914 0.9986083 -0.07532911 0.07679061 -0.9941975 0.07532911 -0.07679061 0.9941975 -0.1244016 0.1986205 -0.9721492 0.1244016 -0.1986205 0.9721492 -0.1335972 0.274524 -0.9522544 0.1335972 -0.274524 0.9522544 -0.0945353 0.29117 -0.951989 0.0945353 -0.29117 0.951989 -0.02723524 0.2547709 -0.9666178 0.02723524 -0.2547709 0.9666178 -0.3320152 -0.265994 -0.9049934 0.3320152 0.265994 0.9049934 0.004728244 -0.1741737 -0.9847036 -0.004728244 0.1741737 0.9847036 -0.03415642 -0.1431948 -0.9891049 0.03415642 0.1431948 0.9891049 -0.07518232 -0.105046 -0.9916214 0.07518232 0.105046 0.9916214 -0.1140014 -0.06375232 -0.991433 0.1140014 0.06375232 0.991433 -0.1415538 -0.02871672 -0.989514 0.1415538 0.02871672 0.989514 -0.1581193 -0.0002803224 -0.98742 0.1581193 0.0002803224 0.98742 -0.1662737 0.0234466 -0.9858009 0.1662737 -0.0234466 0.9858009 -0.1375361 0.02877919 -0.9900786 0.1375361 -0.02877919 0.9900786 -0.01071079 -0.01898142 -0.9997625 0.01071079 0.01898142 0.9997625 0.05566537 -0.01765385 -0.9982934 -0.05566537 0.01765385 0.9982934 0.05273119 -0.01610311 -0.9984789 -0.05273119 0.01610311 0.9984789 0.048427 -0.01274213 -0.9987454 -0.048427 0.01274213 0.9987454 0.04339433 -0.00830397 -0.9990235 -0.04339433 0.00830397 0.9990235 0.0417556 -0.007415689 -0.9991003 -0.0417556 0.007415689 0.9991003 0.04617973 -0.01961286 -0.9987406 -0.04617973 0.01961286 0.9987406 0.0520384 -0.04499523 -0.9976309 -0.0520384 0.04499523 0.9976309 0.0480728 -0.07712901 -0.9958615 -0.0480728 0.07712901 0.9958615 0.03555814 -0.1047204 
-0.9938658 -0.03555814 0.1047204 0.9938658 0.009264339 0.2005532 -0.979639 -0.009264339 -0.2005532 0.979639 -0.03443592 0.0972527 -0.9946638 0.03443592 -0.0972527 0.9946638 -0.07593956 -0.005488413 -0.9970973 0.07593956 0.005488413 0.9970973 -0.09889974 -0.07008039 -0.9926266 0.09889974 0.07008039 0.9926266 -0.1175919 -0.1130094 -0.9866109 0.1175919 0.1130094 0.9866109 -0.1698806 -0.1823236 -0.9684517 0.1698806 0.1823236 0.9684517 -0.2354752 -0.2364456 -0.9426797 0.2354752 0.2364456 0.9426797 -0.2960854 -0.2458881 -0.9229694 0.2960854 0.2458881 0.9229694 -0.3462358 -0.226497 -0.9103954 0.3462358 0.226497 0.9103954 0.0383742 -0.1955783 -0.979937 -0.0383742 0.1955783 0.979937 0.04294092 -0.2032922 -0.978176 -0.04294092 0.2032922 0.978176 0.04706846 -0.203236 -0.9779978 -0.04706846 0.203236 0.9779978 0.05067692 -0.1954501 -0.9794034 -0.05067692 0.1954501 0.9794034 0.05368196 -0.1801727 -0.982169 -0.05368196 0.1801727 0.982169 0.05599515 -0.157863 -0.9858721 -0.05599515 0.157863 0.9858721 0.05752872 -0.1292392 -0.9899433 -0.05752872 0.1292392 0.9899433 0.05820471 -0.09531337 -0.9937442 -0.05820471 0.09531337 0.9937442 0.05796686 -0.05739936 -0.996667 -0.05796686 0.05739936 0.996667 0.1300159 -0.04982194 -0.9902594 -0.1300159 0.04982194 0.9902594 0.1715406 -0.1012166 -0.9799638 -0.1715406 0.1012166 0.9799638 0.1603422 -0.1510105 -0.9754415 -0.1603422 0.1510105 0.9754415 0.09961293 -0.1968332 -0.9753635 -0.09961293 0.1968332 0.9753635 0.009087905 -0.2321008 -0.9726493 -0.009087905 0.2321008 0.9726493 -0.09618477 -0.2525325 -0.9627958 0.09618477 0.2525325 0.9627958 -0.2057056 -0.2543115 -0.9449925 0.2057056 0.2543115 0.9449925 -0.3010906 -0.2405394 -0.9227596 0.3010906 0.2405394 0.9227596 -0.3613919 -0.2183316 -0.9064917 0.3613919 0.2183316 0.9064917 0.05679463 -0.0170792 -0.9982398 -0.05679463 0.0170792 0.9982398 0.03180203 -0.1206335 -0.9921876 -0.03180203 0.1206335 0.9921876 0.05041117 -0.1555585 -0.9865395 -0.05041117 0.1555585 0.9865395 0.05668301 -0.1457005 -0.9877036 -0.05668301 0.1457005 0.9877036 0.03891561 -0.09315817 -0.9948905 -0.03891561 0.09315817 0.9948905 -0.003965062 0.01370047 -0.9998983 0.003965062 -0.01370047 0.9998983 -0.05292366 0.1501864 -0.9872402 0.05292366 -0.1501864 0.9872402 -0.07677473 0.2629705 -0.9617443 0.07677473 -0.2629705 0.9617443 -0.05459441 0.3206562 -0.945621 0.05459441 -0.3206562 0.945621 0.004760152 0.3221679 -0.9466706 -0.004760152 -0.3221679 0.9466706 0.04876775 0.2880265 -0.9563799 -0.04876775 -0.2880265 0.9563799 -0.3808499 -0.1922218 -0.9044358 0.3808499 0.1922218 0.9044358 0.03344697 -0.1802524 -0.9830516 -0.03344697 0.1802524 0.9830516 -0.002763778 -0.1514613 -0.9884593 0.002763778 0.1514613 0.9884593 -0.04324458 -0.1143914 -0.9924941 0.04324458 0.1143914 0.9924941 -0.084533 -0.0720665 -0.9938111 0.084533 0.0720665 0.9938111 -0.1197977 -0.03169386 -0.9922923 0.1197977 0.03169386 0.9922923 -0.1426846 -0.0002456351 -0.9897682 0.1426846 0.0002456351 0.9897682 -0.1569382 0.02588106 -0.9872692 0.1569382 -0.02588106 0.9872692 -0.1613096 0.04492413 -0.9858808 0.1613096 -0.04492413 0.9858808 -0.09920143 0.03730343 -0.9943679 0.09920143 -0.03730343 0.9943679 0.04803669 0.002806228 -0.9988416 -0.04803669 -0.002806228 0.9988416 0.05483698 0.01524984 -0.9983789 -0.05483698 -0.01524984 0.9983789 0.05543354 0.01343731 -0.998372 -0.05543354 -0.01343731 0.998372 0.05390944 0.0123775 -0.9984691 -0.05390944 -0.0123775 0.9984691 0.0503637 0.01172906 -0.9986621 -0.0503637 -0.01172906 0.9986621 0.04497037 0.01094766 -0.9989283 -0.04497037 -0.01094766 0.9989283 
0.03808704 0.009202205 -0.9992321 -0.03808704 -0.009202205 0.9992321 0.03176457 0.003436501 -0.9994895 -0.03176457 -0.003436501 0.9994895 0.02569171 -0.01134126 -0.9996056 -0.02569171 0.01134126 0.9996056 0.02029894 -0.03115671 -0.9993084 -0.02029894 0.03115671 0.9993084 0.01477377 -0.05037954 -0.9986209 -0.01477377 0.05037954 0.9986209 0.05343882 0.2398938 -0.9693272 -0.05343882 -0.2398938 0.9693272 0.01266878 0.1400046 -0.9900698 -0.01266878 -0.1400046 0.9900698 -0.03016624 0.03288137 -0.9990039 0.03016624 -0.03288137 0.9990039 -0.06545783 -0.04960272 -0.9966217 0.06545783 0.04960272 0.9966217 -0.08420854 -0.08459736 -0.9928505 0.08420854 0.08459736 0.9928505 -0.1211099 -0.1194012 -0.9854318 0.1211099 0.1194012 0.9854318 -0.1909163 -0.1657517 -0.9675109 0.1909163 0.1657517 0.9675109 -0.2631192 -0.1844077 -0.9469752 0.2631192 0.1844077 0.9469752 -0.3203207 -0.1806349 -0.9299278 0.3203207 0.1806349 0.9299278 -0.3582268 -0.1643936 -0.9190475 0.3582268 0.1643936 0.9190475 0.06267597 -0.1987174 -0.9780507 -0.06267597 0.1987174 0.9780507 0.06442271 -0.205898 -0.9764506 -0.06442271 0.205898 0.9764506 0.06561253 -0.2052713 -0.9765033 -0.06561253 0.2052713 0.9765033 0.06621689 -0.1968782 -0.9781893 -0.06621689 0.1968782 0.9781893 0.06619977 -0.1809558 -0.9812607 -0.06619977 0.1809558 0.9812607 0.06552098 -0.1579632 -0.9852688 -0.06552098 0.1579632 0.9852688 0.06414347 -0.1286243 -0.9896168 -0.06414347 0.1286243 0.9896168 0.0620436 -0.09396367 -0.9936405 -0.0620436 0.09396367 0.9936405 0.05922232 -0.05531673 -0.996711 -0.05922232 0.05531673 0.996711 0.05574493 -0.01433062 -0.9983422 -0.05574493 0.01433062 0.9983422 0.17367 0.006317678 -0.9847836 -0.17367 -0.006317678 0.9847836 0.2140338 -0.02867043 -0.9764054 -0.2140338 0.02867043 0.9764054 0.2032298 -0.06342383 -0.9770747 -0.2032298 0.06342383 0.9770747 0.138749 -0.09991994 -0.9852739 -0.138749 0.09991994 0.9852739 0.03877227 -0.1316188 -0.9905419 -0.03877227 0.1316188 0.9905419 -0.08232384 -0.1538909 -0.9846524 0.08232384 0.1538909 0.9846524 -0.2075001 -0.1607487 -0.9649371 0.2075001 0.1607487 0.9649371 -0.3131757 -0.1578979 -0.936477 0.3131757 0.1578979 0.936477 -0.378548 -0.1502083 -0.913312 0.378548 0.1502083 0.913312 -0.4006403 -0.140837 -0.9053465 0.4006403 0.140837 0.9053465 0.05209989 0.01793348 -0.9984808 -0.05209989 -0.01793348 0.9984808 0.02483218 -0.06315913 -0.9976945 -0.02483218 0.06315913 0.9976945 0.06240902 -0.1184673 -0.9909948 -0.06240902 0.1184673 0.9909948 0.08696638 -0.1342763 -0.9871204 -0.08696638 0.1342763 0.9871204 0.08487294 -0.1080169 -0.9905195 -0.08487294 0.1080169 0.9905195 0.0638435 -0.03633881 -0.9972981 -0.0638435 0.03633881 0.9972981 0.02475048 0.0883771 -0.9957795 -0.02475048 -0.0883771 0.9957795 -0.007138061 0.2243192 -0.9744896 0.007138061 -0.2243192 0.9744896 -0.003384258 0.3194607 -0.9475935 0.003384258 -0.3194607 0.9475935 0.04046919 0.3581575 -0.9327837 -0.04046919 -0.3581575 0.9327837 0.08321324 0.351282 -0.9325645 -0.08321324 -0.351282 0.9325645 0.09011564 0.3220916 -0.9424098 -0.09011564 -0.3220916 0.9424098 -0.379405 -0.1313093 -0.9158656 0.379405 0.1313093 0.9158656 0.06039409 -0.183882 -0.9810912 -0.06039409 0.183882 0.9810912 0.02824194 -0.1576973 -0.9870836 -0.02824194 0.1576973 0.9870836 -0.01011527 -0.1223083 -0.9924406 0.01011527 0.1223083 0.9924406 -0.05160711 -0.0803396 -0.9954307 0.05160711 0.0803396 0.9954307 -0.09174659 -0.03636321 -0.9951182 0.09174659 0.03636321 0.9951182 -0.1216309 -0.0003118256 -0.9925753 0.1216309 0.0003118256 0.9925753 -0.1416274 0.02808791 -0.9895215 0.1416274 
-0.02808791 0.9895215 -0.1534901 0.05091111 -0.9868378 0.1534901 -0.05091111 0.9868378 -0.1484911 0.06377853 -0.986855 0.1484911 -0.06377853 0.986855 -0.05760226 0.05048303 -0.9970624 0.05760226 -0.05048303 0.9970624 0.09142359 0.04194606 -0.9949283 -0.09142359 -0.04194606 0.9949283 0.04932788 0.03293331 -0.9982395 -0.04932788 -0.03293331 0.9982395 0.0520678 0.02981658 -0.9981983 -0.0520678 -0.02981658 0.9981983 0.05270318 0.02642645 -0.9982605 -0.05270318 -0.02642645 0.9982605 0.05119946 0.0228378 -0.9984273 -0.05119946 -0.0228378 0.9984273 0.0476041 0.0191284 -0.9986831 -0.0476041 -0.0191284 0.9986831 0.04204543 0.0153772 -0.9989974 -0.04204543 -0.0153772 0.9989974 0.03472885 0.01166263 -0.9993287 -0.03472885 -0.01166263 0.9993287 0.02593022 0.008060754 -0.9996313 -0.02593022 -0.008060754 0.9996313 0.01667021 0.003959315 -0.9998532 -0.01667021 -0.003959315 0.9998532 0.007585102 -0.002843862 -0.9999672 -0.007585102 0.002843862 0.9999672 0.002101886 -0.0109889 -0.9999374 -0.002101886 0.0109889 0.9999374 0.08151688 0.2747723 -0.9580476 -0.08151688 -0.2747723 0.9580476 0.04564015 0.1799711 -0.9826125 -0.04564015 -0.1799711 0.9826125 0.004949971 0.07525954 -0.9971517 -0.004949971 -0.07525954 0.9971517 -0.0358843 -0.01868175 -0.9991813 0.0358843 0.01868175 0.9991813 -0.06264812 -0.06486435 -0.9959256 0.06264812 0.06486435 0.9959256 -0.08456621 -0.0766685 -0.9934639 0.08456621 0.0766685 0.9934639 -0.1289358 -0.09412609 -0.9871757 0.1289358 0.09412609 0.9871757 -0.1951832 -0.1163343 -0.9738428 0.1951832 0.1163343 0.9738428 -0.2533014 -0.1315211 -0.9584052 0.2533014 0.1315211 0.9584052 -0.2926548 -0.1357862 -0.946528 0.2926548 0.1357862 0.946528 -0.3144246 -0.1220133 -0.9414085 0.3144246 0.1220133 0.9414085 0.08411743 -0.1991761 -0.9763468 -0.08411743 0.1991761 0.9763468 0.08303908 -0.2056819 -0.9750895 -0.08303908 0.2056819 0.9750895 0.0812958 -0.2043925 -0.9755074 -0.0812958 0.2043925 0.9755074 0.07890847 -0.1953464 -0.9775547 -0.07890847 0.1953464 0.9775547 0.07588903 -0.1787792 -0.9809581 -0.07588903 0.1787792 0.9809581 0.07224668 -0.1551534 -0.9852451 -0.07224668 0.1551534 0.9852451 0.06799744 -0.1252025 -0.9897983 -0.06799744 0.1252025 0.9897983 0.06317511 -0.0899689 -0.9939389 -0.06317511 0.0899689 0.9939389 0.05784114 -0.05081137 -0.9970319 -0.05784114 0.05081137 0.9970319 0.05222242 -0.009628672 -0.9985891 -0.05222242 0.009628672 0.9985891 0.04728316 0.0213757 -0.9986528 -0.04728316 -0.0213757 0.9986528 0.1980082 0.07478361 -0.9773434 -0.1980082 -0.07478361 0.9773434 0.2377647 0.05825402 -0.9695744 -0.2377647 -0.05825402 0.9695744 0.2300661 0.04118559 -0.9723031 -0.2300661 -0.04118559 0.9723031 0.1684551 0.01841903 -0.9855372 -0.1684551 -0.01841903 0.9855372 0.0650465 -0.00652102 -0.9978609 -0.0650465 0.00652102 0.9978609 -0.06592801 -0.02801853 -0.9974309 0.06592801 0.02801853 0.9974309 -0.2014748 -0.04151338 -0.9786136 0.2014748 0.04151338 0.9786136 -0.3120298 -0.05228784 -0.9486324 0.3120298 0.05228784 0.9486324 -0.3801004 -0.06240802 -0.9228374 0.3801004 0.06240802 0.9228374 -0.4034522 -0.07265012 -0.912112 0.4034522 0.07265012 0.912112 -0.3825468 -0.08360129 -0.9201461 0.3825468 0.08360129 0.9201461 0.04459856 0.03570634 -0.9983667 -0.04459856 -0.03570634 0.9983667 0.01937096 -0.01614559 -0.999682 -0.01937096 0.01614559 0.999682 0.07044347 -0.0633785 -0.9955003 -0.07044347 0.0633785 0.9955003 0.1128499 -0.101492 -0.988415 -0.1128499 0.101492 0.988415 0.1265872 -0.09990168 -0.986912 -0.1265872 0.09990168 0.986912 0.1203611 -0.05914669 -0.9909666 -0.1203611 0.05914669 0.9909666 
0.1004246 0.03110326 -0.9944584 -0.1004246 -0.03110326 0.9944584 0.07117545 0.166243 -0.9835127 -0.07117545 -0.166243 0.9835127 0.05910137 0.290579 -0.955024 -0.05910137 -0.290579 0.955024 0.0836189 0.3643343 -0.9275066 -0.0836189 -0.3643343 0.9275066 0.1167377 0.3878656 -0.9142935 -0.1167377 -0.3878656 0.9142935 0.1224019 0.3808878 -0.9164836 -0.1224019 -0.3808878 0.9164836 0.1113962 0.3512136 -0.929645 -0.1113962 -0.3512136 0.929645 -0.3174131 -0.09522387 -0.9434942 0.3174131 0.09522387 0.9434942 0.08449885 -0.1850277 -0.9790938 -0.08449885 0.1850277 0.9790938 0.05759658 -0.1617628 -0.9851474 -0.05759658 0.1617628 0.9851474 0.02285134 -0.1285636 -0.991438 -0.02285134 0.1285636 0.991438 -0.01715352 -0.08768658 -0.9960004 0.01715352 0.08768658 0.9960004 -0.05888191 -0.04244214 -0.9973623 0.05888191 0.04244214 0.9973623 -0.09527659 -0.001192381 -0.9954501 0.09527659 0.001192381 0.9954501 -0.1208298 0.03001363 -0.9922194 0.1208298 -0.03001363 0.9922194 -0.1385623 0.05521731 -0.9888132 0.1385623 -0.05521731 0.9888132 -0.1478824 0.07372954 -0.9862529 0.1478824 -0.07372954 0.9862529 -0.1280883 0.08180044 -0.9883836 0.1280883 -0.08180044 0.9883836 -0.02142206 0.07494116 -0.9969578 0.02142206 -0.07494116 0.9969578 0.1157497 0.09110771 -0.9890912 -0.1157497 -0.09110771 0.9890912 0.04175028 0.04175028 -0.9982554 -0.04175028 -0.04175028 0.9982554 0.04606355 0.03841129 -0.9981997 -0.04606355 -0.03841129 0.9981997 0.0484623 0.03465945 -0.9982235 -0.0484623 -0.03465945 0.9982235 0.04883746 0.03058041 -0.9983385 -0.04883746 -0.03058041 0.9983385 0.04715863 0.02626546 -0.998542 -0.04715863 -0.02626546 0.998542 0.04347556 0.02180949 -0.9988164 -0.04347556 -0.02180949 0.9988164 0.03791692 0.01730884 -0.999131 -0.03791692 -0.01730884 0.999131 0.03068648 0.01285912 -0.9994463 -0.03068648 -0.01285912 0.9994463 0.02205636 0.008552894 -0.9997201 -0.02205636 -0.008552894 0.9997201 0.01235732 0.004477344 -0.9999136 -0.01235732 -0.004477344 0.9999136 0.003458982 0.001017412 -0.9999935 -0.003458982 -0.001017412 0.9999935 7.925836e-005 2.641945e-005 -1 -7.925836e-005 -2.641945e-005 1 0.09138749 0.3042782 -0.9481894 -0.09138749 -0.3042782 0.9481894 0.06110526 0.2158788 -0.9745063 -0.06110526 -0.2158788 0.9745063 0.02441177 0.1159721 -0.9929524 -0.02441177 -0.1159721 0.9929524 -0.01558797 0.01808112 -0.999715 0.01558797 -0.01808112 0.999715 -0.04825516 -0.03942837 -0.9980565 0.04825516 0.03942837 0.9980565 -0.07304356 -0.05351896 -0.9958917 0.07304356 0.05351896 0.9958917 -0.09520647 -0.05382051 -0.9940015 0.09520647 0.05382051 0.9940015 -0.1245047 -0.06339816 -0.9901915 0.1245047 0.06339816 0.9901915 -0.1572207 -0.08634419 -0.9837817 0.1572207 0.08634419 0.9837817 -0.1882564 -0.1076575 -0.9762015 0.1882564 0.1076575 0.9762015 -0.2079235 -0.1127005 -0.9716308 0.2079235 0.1127005 0.9716308 -0.2105113 -0.106453 -0.9717781 0.2105113 0.106453 0.9717781 0.1018543 -0.1970055 -0.9750972 -0.1018543 0.1970055 0.9750972 0.0980148 -0.2027053 -0.9743222 -0.0980148 0.2027053 0.9743222 0.09340902 -0.2006681 -0.9751959 -0.09340902 0.2006681 0.9751959 0.08810631 -0.1909287 -0.9776418 -0.08810631 0.1909287 0.9776418 0.08216767 -0.1737232 -0.9813607 -0.08216767 0.1737232 0.9813607 0.07565457 -0.1495213 -0.9858599 -0.07565457 0.1495213 0.9858599 0.06863958 -0.1190719 -0.9905102 -0.06863958 0.1190719 0.9905102 0.0612171 -0.0834398 -0.9946307 -0.0612171 0.0834398 0.9946307 0.05351174 -0.04400941 -0.9975969 -0.05351174 0.04400941 0.9975969 0.04603089 -0.00345276 -0.998934 -0.04603089 0.00345276 0.998934 0.0405277 0.02523723 -0.9988596 
-0.0405277 -0.02523723 0.9988596 0.03807166 0.03807166 -0.9985495 -0.03807166 -0.03807166 0.9985495 0.201681 0.1401876 -0.9693669 -0.201681 -0.1401876 0.9693669 0.2419659 0.1424602 -0.9597696 -0.2419659 -0.1424602 0.9597696 0.2383438 0.1432027 -0.9605651 -0.2383438 -0.1432027 0.9605651 0.1858963 0.137355 -0.9729215 -0.1858963 -0.137355 0.9729215 0.085125 0.1221184 -0.9888583 -0.085125 -0.1221184 0.9888583 -0.0472276 0.1049443 -0.9933561 0.0472276 -0.1049443 0.9933561 -0.1849887 0.08406975 -0.9791381 0.1849887 -0.08406975 0.9791381 -0.2952074 0.05878306 -0.9536232 0.2952074 -0.05878306 0.9536232 -0.3634784 0.0305153 -0.9311027 0.3634784 -0.0305153 0.9311027 -0.3870109 0.0001447951 -0.9220751 0.3870109 -0.0001447951 0.9220751 -0.3658167 -0.03174004 -0.9301455 0.3658167 0.03174004 0.9301455 -0.3001999 -0.06445665 -0.951696 0.3001999 0.06445665 0.951696 0.03570634 0.04459856 -0.9983667 -0.03570634 -0.04459856 0.9983667 0.01813552 -0.0003111073 -0.9998355 -0.01813552 0.0003111073 0.9998355 0.07395811 -0.01700156 -0.9971164 -0.07395811 0.01700156 0.9971164 0.1327934 -0.05205242 -0.989776 -0.1327934 0.05205242 0.989776 0.1633496 -0.07238029 -0.9839095 -0.1633496 0.07238029 0.9839095 0.1708779 -0.05482093 -0.9837659 -0.1708779 0.05482093 0.9837659 0.1648011 -0.0008303322 -0.9863265 -0.1648011 0.0008303322 0.9863265 0.1490386 0.1033948 -0.9834109 -0.1490386 -0.1033948 0.9834109 0.1306423 0.2389559 -0.962202 -0.1306423 -0.2389559 0.962202 0.1356293 0.343256 -0.9293977 -0.1356293 -0.343256 0.9293977 0.152456 0.3976296 -0.9047916 -0.152456 -0.3976296 0.9047916 0.1512307 0.4144145 -0.8974352 -0.1512307 -0.4144145 0.8974352 0.1357404 0.4049603 -0.9042023 -0.1357404 -0.4049603 0.9042023 0.1146602 0.3743683 -0.9201638 -0.1146602 -0.3743683 0.9201638 -0.194177 -0.09560382 -0.9762967 0.194177 0.09560382 0.9762967 0.1048449 -0.183726 -0.9773701 -0.1048449 0.183726 0.9773701 0.08414283 -0.163602 -0.9829315 -0.08414283 0.163602 0.9829315 0.05430829 -0.1329911 -0.9896282 -0.05430829 0.1329911 0.9896282 0.01737973 -0.09380944 -0.9954385 -0.01737973 0.09380944 0.9954385 -0.02369875 -0.04886307 -0.9985243 0.02369875 0.04886307 0.9985243 -0.06406935 -0.003762891 -0.9979384 0.06406935 0.003762891 0.9979384 -0.0952262 0.03158045 -0.9949546 0.0952262 -0.03158045 0.9949546 -0.1182264 0.05895905 -0.9912348 0.1182264 -0.05895905 0.9912348 -0.1335923 0.0799987 -0.9878023 0.1335923 -0.0799987 0.9878023 -0.1402816 0.09336632 -0.9856997 0.1402816 -0.09336632 0.9856997 -0.1080187 0.09943315 -0.9891638 0.1080187 -0.09943315 0.9891638 0.0006767316 0.1057846 -0.9943888 -0.0006767316 -0.1057846 0.9943888 0.1195685 0.1369831 -0.9833306 -0.1195685 -0.1369831 0.9833306 0.03367559 0.04669905 -0.9983412 -0.03367559 -0.04669905 0.9983412 0.03930459 0.04347345 -0.9982811 -0.03930459 -0.04347345 0.9982811 0.04326025 0.03973948 -0.9982732 -0.04326025 -0.03973948 0.9982732 0.04536592 0.03558433 -0.9983365 -0.04536592 -0.03558433 0.9983365 0.04551813 0.03110308 -0.9984792 -0.04551813 -0.03110308 0.9984792 0.04369046 0.02639642 -0.9986963 -0.04369046 -0.02639642 0.9986963 0.03993483 0.02156824 -0.9989695 -0.03993483 -0.02156824 0.9989695 0.03438031 0.0167233 -0.9992689 -0.03438031 -0.0167233 0.9992689 0.02722915 0.01196478 -0.9995576 -0.02722915 -0.01196478 0.9995576 0.01874997 0.007391744 -0.9997969 -0.01874997 -0.007391744 0.9997969 0.009304007 0.003132471 -0.9999518 -0.009304007 -0.003132471 0.9999518 0.00180392 0.0001585163 -0.9999984 -0.00180392 -0.0001585163 0.9999984 0 0 -1 -0 -0 1 0.08286781 0.3282284 -0.9409565 -0.08286781 
-0.3282284 0.9409565 0.05853575 0.2466823 -0.967327 -0.05853575 -0.2466823 0.967327 0.02716063 0.1526628 -0.987905 -0.02716063 -0.1526628 0.987905 -0.009061516 0.05544751 -0.9984205 0.009061516 -0.05544751 0.9984205 -0.04299724 -0.01000539 -0.9990251 0.04299724 0.01000539 0.9990251 -0.06850085 -0.02994916 -0.9972014 0.06850085 0.02994916 0.9972014 -0.0871427 -0.03766547 -0.9954835 0.0871427 0.03766547 0.9954835 -0.09368356 -0.04998747 -0.9943463 0.09368356 0.04998747 0.9943463 -0.09585745 -0.07040022 -0.9929024 0.09585745 0.07040022 0.9929024 -0.1018834 -0.09067467 -0.9906553 0.1018834 0.09067467 0.9906553 -0.107958 -0.1052349 -0.98857 0.107958 0.1052349 0.98857 -0.1077908 -0.1142479 -0.9875872 0.1077908 0.1142479 0.9875872 -0.09850407 -0.1183143 -0.9880783 0.09850407 0.1183143 0.9880783 0.1152062 -0.1923083 -0.9745486 -0.1152062 0.1923083 0.9745486 0.1087251 -0.1970794 -0.9743401 -0.1087251 0.1970794 0.9743401 0.1013825 -0.1942158 -0.9757058 -0.1013825 0.1942158 0.9757058 0.09329629 -0.1837494 -0.9785356 -0.09329629 0.1837494 0.9785356 0.08457916 -0.1659204 -0.9825054 -0.08457916 0.1659204 0.9825054 0.07534927 -0.1412115 -0.9871078 -0.07534927 0.1412115 0.9871078 0.06574018 -0.1103917 -0.9917116 -0.06574018 0.1103917 0.9917116 0.05590881 -0.07455183 -0.9956486 -0.05590881 0.07455183 0.9956486 0.04604319 -0.03510401 -0.9983225 -0.04604319 0.03510401 0.9983225 0.0370868 0.003481583 -0.999306 -0.0370868 -0.003481583 0.999306 0.03205085 0.02896869 -0.9990663 -0.03205085 -0.02896869 0.9990663 0.0300091 0.03997318 -0.99875 -0.0300091 -0.03997318 0.99875 0.02818391 0.04688811 -0.9985025 -0.02818391 -0.04688811 0.9985025 0.1864037 0.1877519 -0.9643666 -0.1864037 -0.1877519 0.9643666 0.2285162 0.2066296 -0.9513593 -0.2285162 -0.2066296 0.9513593 0.2293269 0.2223742 -0.9476069 -0.2293269 -0.2223742 0.9476069 0.1881611 0.2324532 -0.9542332 -0.1881611 -0.2324532 0.9542332 0.09719689 0.2309484 -0.968099 -0.09719689 -0.2309484 0.968099 -0.02823703 0.2196562 -0.9751686 0.02823703 -0.2196562 0.9751686 -0.1575653 0.1945954 -0.9681455 0.1575653 -0.1945954 0.9681455 -0.2626711 0.1571567 -0.9520009 0.2626711 -0.1571567 0.9520009 -0.3289615 0.1125074 -0.9376174 0.3289615 -0.1125074 0.9376174 -0.3511785 0.06418039 -0.9341063 0.3511785 -0.06418039 0.9341063 -0.328907 0.01312787 -0.9442711 0.328907 -0.01312787 0.9442711 -0.2627427 -0.03902275 -0.9640765 0.2627427 0.03902275 0.9640765 -0.1616928 -0.08690174 -0.9830074 0.1616928 0.08690174 0.9830074 0.0266167 0.04933934 -0.9984274 -0.0266167 -0.04933934 0.9984274 0.01609116 -0.001734812 -0.999869 -0.01609116 0.001734812 0.999869 0.07213237 -0.004182905 -0.9973863 -0.07213237 0.004182905 0.9973863 0.1442186 -0.01344146 -0.9894546 -0.1442186 0.01344146 0.9894546 0.1925706 -0.03107823 -0.9807909 -0.1925706 0.03107823 0.9807909 0.2151256 -0.03356407 -0.9760094 -0.2151256 0.03356407 0.9760094 0.2202002 -0.002631497 -0.9754511 -0.2202002 0.002631497 0.9754511 0.2168537 0.06239875 -0.9742078 -0.2168537 -0.06239875 0.9742078 0.2046331 0.1745665 -0.9631468 -0.2046331 -0.1745665 0.9631468 0.1946542 0.2985017 -0.9343481 -0.1946542 -0.2985017 0.9343481 0.1914975 0.3811865 -0.9044476 -0.1914975 -0.3811865 0.9044476 0.1780448 0.4226854 -0.8886153 -0.1780448 -0.4226854 0.8886153 0.1560338 0.4348064 -0.8869029 -0.1560338 -0.4348064 0.8869029 0.131824 0.4228994 -0.896537 -0.131824 -0.4228994 0.896537 0.09990306 0.3919417 -0.9145497 -0.09990306 -0.3919417 0.9145497 -0.08115685 -0.1182 -0.9896678 0.08115685 0.1182 0.9896678 0.1206953 -0.1800662 -0.9762217 -0.1206953 0.1800662 
0.9762217 0.1068905 -0.1632324 -0.9807801 -0.1068905 0.1632324 0.9807801 0.08300787 -0.1355156 -0.9872919 -0.08300787 0.1355156 0.9872919 0.05057029 -0.09849879 -0.9938514 -0.05057029 0.09849879 0.9938514 0.01193849 -0.05471192 -0.9984308 -0.01193849 0.05471192 0.9984308 -0.02928471 -0.007728178 -0.9995412 0.02928471 0.007728178 0.9995412 -0.06544828 0.0323451 -0.9973316 0.06544828 -0.0323451 0.9973316 -0.0931842 0.06203402 -0.9937145 0.0931842 -0.06203402 0.9937145 -0.1140229 0.08542855 -0.9897983 0.1140229 -0.08542855 0.9897983 -0.1268744 0.1014129 -0.986721 0.1268744 -0.1014129 0.986721 -0.1309655 0.1105922 -0.9851992 0.1309655 -0.1105922 0.9851992 -0.09396831 0.1168897 -0.9886894 0.09396831 -0.1168897 0.9886894 0.004902002 0.1327945 -0.9911315 -0.004902002 -0.1327945 0.9911315 0.1043125 0.1670384 -0.9804168 -0.1043125 -0.1670384 0.9804168 0.05812455 0.346239 -0.936344 -0.05812455 -0.346239 0.936344 0.04003092 0.2713879 -0.9616372 -0.04003092 -0.2713879 0.9616372 0.01503674 0.1835325 -0.9828986 -0.01503674 -0.1835325 0.9828986 -0.01521731 0.08947623 -0.9958727 0.01521731 -0.08947623 0.9958727 -0.04502875 0.01842958 -0.9988157 0.04502875 -0.01842958 0.9988157 -0.0641528 -0.01048017 -0.9978851 0.0641528 0.01048017 0.9978851 -0.0739885 -0.0284538 -0.9968531 0.0739885 0.0284538 0.9968531 -0.0750772 -0.05029923 -0.9959083 0.0750772 0.05029923 0.9959083 -0.0726332 -0.0726332 -0.9947104 0.0726332 0.0726332 0.9947104 -0.06917097 -0.09213807 -0.9933408 0.06917097 0.09213807 0.9933408 -0.06489676 -0.1079651 -0.9920342 0.06489676 0.1079651 0.9920342 -0.05992173 -0.1195302 -0.9910206 0.05992173 0.1195302 0.9910206 -0.05436448 -0.1264151 -0.9904866 0.05436448 0.1264151 0.9904866 -0.04834725 -0.1283694 -0.9905472 0.04834725 0.1283694 0.9905472 0.1236642 -0.1852229 -0.9748844 -0.1236642 0.1852229 0.9748844 0.1147044 -0.188952 -0.9752641 -0.1147044 0.188952 0.9752641 0.1047967 -0.1851916 -0.9770986 -0.1047967 0.1851916 0.9770986 0.09410819 -0.1739741 -0.9802432 -0.09410819 0.1739741 0.9802432 0.08280675 -0.1555502 -0.9843511 -0.08280675 0.1555502 0.9843511 0.07107282 -0.1304222 -0.9889078 -0.07107282 0.1304222 0.9889078 0.0591071 -0.09938136 -0.9932924 -0.0591071 0.09938136 0.9932924 0.04712816 -0.063543 -0.9968657 -0.04712816 0.063543 0.9968657 0.03543246 -0.02440427 -0.9990741 -0.03543246 0.02440427 0.9990741 0.0255855 0.01030939 -0.9996195 -0.0255855 -0.01030939 0.9996195 0.02213973 0.03183891 -0.9992478 -0.02213973 -0.03183891 0.9992478 0.02073385 0.0413642 -0.998929 -0.02073385 -0.0413642 0.998929 0.01949447 0.04856273 -0.9986299 -0.01949447 -0.04856273 0.9986299 0.01842929 0.05132988 -0.9985117 -0.01842929 -0.05132988 0.9985117 0.1556871 0.2081547 -0.9656258 -0.1556871 -0.2081547 0.9656258 0.2012463 0.2403504 -0.9495955 -0.2012463 -0.2403504 0.9495955 0.2079891 0.2677529 -0.9407704 -0.2079891 -0.2677529 0.9407704 0.1750453 0.2897982 -0.9409443 -0.1750453 -0.2897982 0.9409443 0.09931425 0.3032364 -0.9477259 -0.09931425 -0.3032364 0.9477259 -0.01044667 0.2976561 -0.954616 0.01044667 -0.2976561 0.954616 -0.1233808 0.2718083 -0.9544095 0.1233808 -0.2718083 0.9544095 -0.2171608 0.2280826 -0.9491151 0.2171608 -0.2280826 0.9491151 -0.2780633 0.1714307 -0.9451414 0.2780633 -0.1714307 0.9451414 -0.2978343 0.1077398 -0.9485183 0.2978343 -0.1077398 0.9485183 -0.2734749 0.04078401 -0.9610141 0.2734749 -0.04078401 0.9610141 -0.2069767 -0.02715435 -0.977969 0.2069767 0.02715435 0.977969 -0.1204768 -0.08600684 -0.9889834 0.1204768 0.08600684 0.9889834 -0.06075337 -0.1146904 -0.9915418 0.06075337 0.1146904 
0.9915418 0.06714377 -0.008977988 -0.9977029 -0.06714377 0.008977988 0.9977029 0.1430455 -0.009482786 -0.9896707 -0.1430455 0.009482786 0.9896707 0.2078981 -0.006984641 -0.9781255 -0.2078981 0.006984641 0.9781255 0.2496274 -0.004754049 -0.9683303 -0.2496274 0.004754049 0.9683303 0.2676557 0.01117943 -0.9634498 -0.2676557 -0.01117943 0.9634498 0.2721498 0.05188919 -0.9608548 -0.2721498 -0.05188919 0.9608548 0.2702815 0.1241059 -0.954749 -0.2702815 -0.1241059 0.954749 0.254999 0.235991 -0.9377013 -0.254999 -0.235991 0.9377013 0.2324485 0.3396588 -0.9113724 -0.2324485 -0.3396588 0.9113724 0.2041565 0.4060436 -0.8907574 -0.2041565 -0.4060436 0.8907574 0.1740468 0.4408504 -0.8805445 -0.1740468 -0.4408504 0.8805445 0.1445799 0.449041 -0.8817363 -0.1445799 -0.449041 0.8817363 0.1106716 0.4353669 -0.8934246 -0.1106716 -0.4353669 0.8934246 0.06919099 0.4039036 -0.9121812 -0.06919099 -0.4039036 0.9121812 -0.04199374 -0.1253106 -0.9912284 0.04199374 0.1253106 0.9912284 0.1314994 -0.1741735 -0.9758952 -0.1314994 0.1741735 0.9758952 0.1250467 -0.1607238 -0.9790461 -0.1250467 0.1607238 0.9790461 0.1078883 -0.1361339 -0.9847983 -0.1078883 0.1361339 0.9847983 0.08106202 -0.1016763 -0.9915094 -0.08106202 0.1016763 0.9915094 0.04645659 -0.05951995 -0.9971455 -0.04645659 0.05951995 0.9971455 0.006728127 -0.01264729 -0.9998974 -0.006728127 0.01264729 0.9998974 -0.03265123 0.03140531 -0.9989733 0.03265123 -0.03140531 0.9989733 -0.06434923 0.06434923 -0.9958506 0.06434923 -0.06434923 0.9958506 -0.08993364 0.08968059 -0.9919019 0.08993364 -0.08968059 0.9919019 -0.1086641 0.1079614 -0.9881986 0.1086641 -0.1079614 0.9881986 -0.1191145 0.1194141 -0.9856734 0.1191145 -0.1194141 0.9856734 -0.1209184 0.1267028 -0.9845431 0.1209184 -0.1267028 0.9845431 -0.08717794 0.1307014 -0.9875815 0.08717794 -0.1307014 0.9875815 -0.006921259 0.1463004 -0.989216 0.006921259 -0.1463004 0.989216 0.07325756 0.1731877 -0.9821606 -0.07325756 -0.1731877 0.9821606 0.02144633 0.3577234 -0.9335813 -0.02144633 -0.3577234 0.9335813 0.01013303 0.2888713 -0.9573143 -0.01013303 -0.2888713 0.9573143 -0.006382854 0.2063609 -0.9784551 0.006382854 -0.2063609 0.9784551 -0.02513247 0.1152931 -0.9930135 0.02513247 -0.1152931 0.9930135 -0.04212245 0.03896635 -0.9983523 0.04212245 -0.03896635 0.9983523 -0.05120105 -0.001826744 -0.9986867 0.05120105 0.001826744 0.9986867 -0.05290665 -0.02668032 -0.998243 0.05290665 0.02668032 0.998243 -0.05193928 -0.05193928 -0.9972987 0.05193928 0.05193928 0.9972987 -0.05019189 -0.07518463 -0.9959056 0.05019189 0.07518463 0.9959056 -0.04782746 -0.0954162 -0.9942878 0.04782746 0.0954162 0.9942878 -0.04491212 -0.1118809 -0.9927062 0.04491212 0.1118809 0.9927062 -0.04152115 -0.1239861 -0.9914149 0.04152115 0.1239861 0.9914149 -0.03773519 -0.1313093 -0.990623 0.03773519 0.1313093 0.990623 -0.03363715 -0.1335985 -0.9904645 0.03363715 0.1335985 0.9904645 -0.02931054 -0.1307705 -0.9909793 0.02931054 0.1307705 0.9909793 0.1268911 -0.1759121 -0.9761934 -0.1268911 0.1759121 0.9761934 0.1156513 -0.1784987 -0.9771197 -0.1156513 0.1784987 0.9771197 0.1033909 -0.1737839 -0.9793414 -0.1033909 0.1737839 0.9793414 0.09032816 -0.1618066 -0.9826797 -0.09032816 0.1618066 0.9826797 0.07668877 -0.1428355 -0.9867709 -0.07668877 0.1428355 0.9867709 0.06271872 -0.1174011 -0.9911021 -0.06271872 0.1174011 0.9911021 0.0486958 -0.08632356 -0.9950764 -0.0486958 0.08632356 0.9950764 0.03490555 -0.05071025 -0.9981032 -0.03490555 0.05071025 0.9981032 0.02221579 -0.01288365 -0.9996702 -0.02221579 0.01288365 0.9996702 0.01319405 0.01645603 -0.9997775 
-0.01319405 -0.01645603 0.9997775 0.0111935 0.03312868 -0.9993884 -0.0111935 -0.03312868 0.9993884 0.01061856 0.04220869 -0.9990524 -0.01061856 -0.04220869 0.9990524 0.009997462 0.04957998 -0.9987201 -0.009997462 -0.04957998 0.9987201 0.009461234 0.05262092 -0.9985697 -0.009461234 -0.05262092 0.9985697 0.1136239 0.1953722 -0.9741249 -0.1136239 -0.1953722 0.9741249 0.1639607 0.2375282 -0.9574431 -0.1639607 -0.2375282 0.9574431 0.1767907 0.2749081 -0.945077 -0.1767907 -0.2749081 0.945077 0.1504922 0.3075062 -0.9395701 -0.1504922 -0.3075062 0.9395701 0.1062513 0.3152348 -0.943047 -0.1062513 -0.3152348 0.943047 0.03107006 0.2496371 -0.9678409 -0.03107006 -0.2496371 0.9678409 -0.1141941 0.1734294 -0.9782034 0.1141941 -0.1734294 0.9782034 -0.1813325 0.2141458 -0.959823 0.1813325 -0.2141458 0.959823 -0.2146298 0.1979361 -0.9564284 0.2146298 -0.1979361 0.9564284 -0.2290958 0.1235908 -0.965526 0.2290958 -0.1235908 0.965526 -0.2028154 0.04408144 -0.9782243 0.2028154 -0.04408144 0.9782243 -0.1402408 -0.03142828 -0.9896185 0.1402408 0.03142828 0.9896185 -0.07713497 -0.08940037 -0.9930044 0.07713497 0.08940037 0.9930044 -0.04111155 -0.1193163 -0.9920048 0.04111155 0.1193163 0.9920048 -0.03148776 -0.1442695 -0.9890373 0.03148776 0.1442695 0.9890373 0.1374568 -0.01723609 -0.9903578 -0.1374568 0.01723609 0.9903578 0.2066811 -0.0151376 -0.9782913 -0.2066811 0.0151376 0.9782913 0.2627493 -0.001246907 -0.9648633 -0.2627493 0.001246907 0.9648633 0.2987836 0.01921931 -0.9541273 -0.2987836 -0.01921931 0.9541273 0.3144591 0.05175116 -0.9478593 -0.3144591 -0.05175116 0.9478593 0.3172064 0.09942975 -0.9431298 -0.3172064 -0.09942975 0.9431298 0.3050933 0.1738319 -0.9363229 -0.3050933 -0.1738319 0.9363229 0.2713589 0.2754306 -0.9222268 -0.2713589 -0.2754306 0.9222268 0.2302761 0.3647537 -0.9021794 -0.2302761 -0.3647537 0.9021794 0.1913202 0.4232105 -0.8856012 -0.1913202 -0.4232105 0.8856012 0.1547868 0.4528223 -0.8780621 -0.1547868 -0.4528223 0.8780621 0.1168446 0.4578632 -0.8813107 -0.1168446 -0.4578632 0.8813107 0.07417505 0.4424979 -0.8936967 -0.07417505 -0.4424979 0.8936967 0.0266515 0.4099114 -0.9117359 -0.0266515 -0.4099114 0.9117359 -0.0232068 -0.1624209 -0.9864486 0.0232068 0.1624209 0.9864486 0.1368889 -0.1661949 -0.9765453 -0.1368889 0.1661949 0.9765453 0.1380204 -0.1561802 -0.9780379 -0.1380204 0.1561802 0.9780379 0.1281049 -0.1348876 -0.9825449 -0.1281049 0.1348876 0.9825449 0.1077391 -0.1033223 -0.9887956 -0.1077391 0.1033223 0.9887956 0.07828688 -0.06329972 -0.9949192 -0.07828688 0.06329972 0.9949192 0.04200642 -0.01751588 -0.9989638 -0.04200642 0.01751588 0.9989638 0.002133028 0.02833094 -0.9995963 -0.002133028 -0.02833094 0.9995963 -0.034339 0.06265958 -0.997444 0.034339 -0.06265958 0.997444 -0.06659396 0.08694895 -0.9939845 0.06659396 -0.08694895 0.9939845 -0.09280195 0.1055041 -0.9900791 0.09280195 -0.1055041 0.9900791 -0.1104595 0.1185759 -0.9867819 0.1104595 -0.1185759 0.9867819 -0.1185215 0.1292115 -0.9845085 0.1185215 -0.1292115 0.9845085 -0.1157241 0.1354892 -0.9839973 0.1157241 -0.1354892 0.9839973 -0.08788108 0.1356819 -0.9868472 0.08788108 -0.1356819 0.9868472 -0.0286952 0.140404 -0.9896784 0.0286952 -0.140404 0.9896784 0.03097008 0.1503746 -0.9881439 -0.03097008 -0.1503746 0.9881439 -0.009898152 0.3619077 -0.9321614 0.009898152 -0.3619077 0.9321614 -0.01238887 0.2973624 -0.9546843 0.01238887 -0.2973624 0.9546843 -0.01725759 0.2176092 -0.9758834 0.01725759 -0.2176092 0.9758834 -0.02276303 0.1254927 -0.9918334 0.02276303 -0.1254927 0.9918334 -0.02627594 0.04527886 -0.9986288 0.02627594 
[Asset-file data payload carried in this diff: long float arrays — unit normal vector triples (in ± pairs) followed by 2D coordinate pairs.]
-2.205328 -2.821356 -2.179728 -2.945588 2.611036 -3.51726 2.586935 -3.393104 2.70959 -3.368709 -2.588268 -2.608831 -2.463513 -2.600896 -2.455695 -2.727524 2.750162 -3.568927 2.722202 -3.446012 2.844035 -3.417805 -2.787657 -2.427336 -2.662914 -2.435385 -2.670916 -2.560872 0.3974198 -3.768962 0.4783214 -3.673482 0.5736101 -3.754546 2.624403 -2.847952 2.63577 -2.722554 2.760252 -2.734004 2.653554 -2.9137 2.668962 -2.787417 2.793009 -2.803103 2.331597 -3.223614 2.36422 -3.098563 2.484888 -3.132371 2.040828 -3.437029 2.087309 -3.314153 2.203346 -3.363375 1.839159 -3.582019 1.895223 -3.461573 2.006945 -3.522014 2.92104 -3.593221 2.888356 -3.471912 3.009007 -3.43905 4.042942 -2.375494 3.972685 -2.271404 4.076072 -2.200669 4.410955 -1.745528 4.327954 -1.650928 4.421419 -1.56692 4.514481 -1.626835 4.430919 -1.532284 4.523882 -1.447295 4.488343 -1.845025 4.411372 -1.74459 4.509863 -1.6661 4.302515 -2.354733 4.239459 -2.244682 4.347389 -2.180386 3.940667 -3.005751 3.897139 -2.886441 4.014315 -2.84212 3.086702 -3.809596 3.076205 -3.683203 3.200763 -3.672551 1.108507 -4.743722 1.156749 -4.627381 1.272065 -4.676052 -3.138279 -3.706734 -3.01621 -3.679529 -2.989299 -3.802932 -4.662871 -1.383109 -4.547466 -1.432457 -4.595496 -1.551029 -4.834292 -0.5300844 -4.73199 -0.6056211 -4.803818 -0.7132054 -4.85186 -0.2986454 -4.756424 -0.3838964 -4.837153 -0.4846782 -4.849875 -0.3484103 -4.756934 -0.4349473 -4.840522 -0.5311665 -4.338685 -1.968682 -4.223121 -2.017427 -4.270764 -2.135665 -3.173168 -3.420093 -3.04818 -3.421851 -3.049911 -3.548738 -1.587495 -4.273382 -1.470379 -4.228968 -1.42669 -4.348027 -0.08570911 -4.423066 0.01236576 -4.343373 0.08986622 -4.444223 1.163842 -4.075402 1.238519 -3.969182 1.33876 -4.048314 1.578924 -3.753099 1.645118 -3.637477 1.751153 -3.709655 1.281866 -3.56891 1.15864 -3.547628 1.302849 -3.443921 0.3270304 -3.737908 0.2153505 -3.681119 0.3831784 -3.624952 1.53367 -3.585188 1.409236 -3.573108 1.545557 -3.45874 -0.8805973 -3.616855 -0.964739 -3.523461 -0.7881572 -3.531845 1.758031 -3.606663 1.633095 -3.602595 1.762031 -3.479611 -1.833824 -3.210807 -1.884374 -3.095112 -1.719501 -3.159649 1.977994 -3.626138 1.853042 -3.629652 1.974537 -3.499134 -2.291084 -2.881755 -2.318211 -2.758062 -2.169063 -2.854257 2.214251 -3.631472 2.089752 -3.642823 2.203064 -3.505142 -2.587019 -2.61073 -2.594895 -2.484134 -2.462267 -2.602738 2.491256 -3.601512 2.367869 -3.621768 2.471242 -3.476631 -2.728604 -2.464679 -2.723569 -2.33791 -2.603706 -2.469789 2.844884 -3.494826 2.723837 -3.526294 2.813697 -3.372691 -0.2266202 -3.663424 -0.304908 -3.565027 -0.1291727 -3.584373 2.178512 -3.07075 2.057286 -3.039837 2.208996 -2.94782 2.878925 -2.553161 2.754064 -2.547114 2.884813 -2.424911 2.915422 -2.611359 2.790821 -2.600955 2.925402 -2.481468 2.68405 -2.902412 2.561577 -2.875874 2.709061 -2.772462 2.406363 -3.1721 2.287834 -3.129228 2.446059 -3.044088 2.15696 -3.401491 2.043092 -3.345196 2.208527 -3.277182 3.33345 -3.217113 3.217594 -3.264321 3.28652 -3.100572 3.225462 -3.397514 3.106694 -3.436691 3.186486 -3.278134 3.520077 -3.166879 3.403564 -3.212508 3.474801 -3.049455 4.11287 -2.458495 4.005603 -2.523498 4.048691 -2.34985 4.332286 -2.17825 4.228239 -2.248752 4.26301 -2.072361 4.379262 -2.213076 4.27346 -2.281002 4.312697 -2.105111 4.283032 -2.509982 4.17198 -2.568525 4.225651 -2.396683 4.021091 -3.008605 3.903399 -3.051494 3.978977 -2.888746 3.540854 -3.511248 3.417712 -3.532997 3.519381 -3.386523 2.321892 -4.402883 2.1985 -4.382823 2.341877 -4.279028 -4.931837 -0.6661784 -4.867328 -0.5587489 -4.824769 -0.730905 
-4.928143 0.5853475 -4.835768 0.6715439 -4.84393 0.4907963 -4.928011 0.4822866 -4.834101 0.5689375 -4.845513 0.3836493 -4.942351 0.2480601 -4.849264 0.3361417 -4.858925 0.1497788 -4.952876 -0.004755707 -4.861108 0.08310842 -4.868002 -0.099757 -4.396459 -2.066354 -4.348259 -1.948363 -4.281126 -2.115665 -2.698021 -3.897973 -2.712062 -3.771272 -2.573812 -3.88365 -1.492772 -4.40643 -1.5378 -4.287789 -1.376164 -4.360617 -0.0668361 -4.543558 -0.1430876 -4.44222 0.03221295 -4.465545 1.323697 -4.172005 1.222445 -4.095031 1.396998 -4.065678 1.752486 -3.836087 1.645383 -3.766524 1.816938 -3.72049 1.807405 -3.673416 1.700456 -3.6026 1.872111 -3.556368 0.9773295 -3.590257 1.004408 -3.466203 1.12644 -3.49373 1.214809 -3.631598 1.23324 -3.505677 1.356874 -3.524449 0.09379505 -3.67412 0.1535342 -3.562777 0.2633351 -3.623355 1.426796 -3.67693 1.437711 -3.55002 1.562233 -3.561144 -0.9449388 -3.515548 -0.8529647 -3.429778 -0.7683139 -3.522968 1.634215 -3.721682 1.638047 -3.594437 1.762988 -3.59834 -1.811301 -3.130729 -1.698171 -3.076841 -1.645006 -3.191507 1.85651 -3.756663 1.853052 -3.62966 1.978004 -3.626146 -2.272603 -2.791838 -2.151033 -2.762335 -2.12195 -2.885658 2.117441 -3.765002 2.105717 -3.638884 2.230166 -3.627003 -2.576797 -2.501426 -2.452105 -2.492534 -2.443345 -2.619101 2.453429 -3.712893 2.431274 -3.588656 2.554295 -3.566282 -2.779533 -2.275409 -2.654789 -2.283519 -2.662785 -2.410054 2.928852 -3.521195 2.891876 -3.401024 3.011282 -3.363811 -1.459725 -3.312833 -1.342157 -3.270348 -1.299699 -3.38799 3.474584 -1.306162 3.428799 -1.189334 3.545112 -1.143347 3.561948 -1.320138 3.51874 -1.201084 3.636034 -1.157227 3.342454 -1.94918 3.323618 -1.822183 3.447191 -1.802826 2.975855 -2.548656 2.983053 -2.418582 3.107845 -2.426084 2.516846 -3.027326 2.548281 -2.898779 2.669264 -2.93218 2.095467 -3.337963 2.145397 -3.213585 2.259992 -3.267778 1.647927 -3.587878 1.714045 -3.470659 1.820127 -3.543719 3.654846 -2.965567 3.594126 -2.855919 3.703388 -2.794984 3.55555 -3.157061 3.502463 -3.043579 3.61563 -2.990345 4.248651 -2.250681 4.172616 -2.151189 4.271832 -2.074944 4.707594 -1.216366 4.612494 -1.134766 4.693618 -1.039108 4.741187 -1.300138 4.649317 -1.214517 4.734082 -1.12172 4.683899 -1.662628 4.600731 -1.568084 4.694048 -1.483823 4.487829 -2.255745 4.419368 -2.149667 4.523954 -2.08023 4.070019 -3.026335 4.023297 -2.908658 4.139236 -2.861236 3.358434 -3.715 3.339617 -3.58936 3.463193 -3.570228 2.504811 -4.292468 2.515698 -4.166377 2.640223 -4.1774 -0.2262255 -4.953313 -0.1467778 -4.856218 -0.05027362 -4.936152 -4.589371 -1.890595 -4.469269 -1.925647 -4.503917 -2.047148 -4.952315 -0.28439 -4.852839 -0.3627245 -4.928533 -0.4656706 -4.941312 0.3319078 -4.855863 0.2358153 -4.947097 0.1458155 -4.885864 0.7493994 -4.812771 0.6432082 -4.914174 0.5666638 -4.743453 1.375014 -4.688201 1.260227 -4.800327 1.203663 -4.731415 -1.150484 -4.628386 -1.222027 -4.699167 -1.326164 -2.529535 -4.036508 -2.406026 -4.017053 -2.386778 -4.141887 -1.223231 -4.502598 -1.109525 -4.449994 -1.057604 -4.565196 -0.09181469 -4.541853 0.007644649 -4.464364 0.08336017 -4.566153 0.996088 -4.231587 1.07672 -4.130706 1.172237 -4.215866 1.284652 -3.948851 1.360591 -3.840453 1.45988 -3.923359 1.230967 -3.800187 1.309928 -3.69224 1.406831 -3.7802 1.732314 -3.308212 1.607316 -3.307547 1.732968 -3.181239 0.969626 -3.560319 0.8488882 -3.527579 1.001989 -3.438177 1.893393 -3.343153 1.7685 -3.348422 1.888219 -3.215995 -0.2208238 -3.648111 -0.3219106 -3.573899 -0.1472952 -3.546085 2.04843 -3.383754 1.923872 -3.39446 2.037926 -3.25681 -1.514685 -3.296228 
-1.57786 -3.187328 -1.406825 -3.232444 2.212051 -3.421678 2.088063 -3.437849 2.196177 -3.295368 -2.202701 -2.854626 -2.234721 -2.732358 -2.081872 -2.822224 2.398999 -3.445247 2.275915 -3.467416 2.377198 -3.320081 -2.568304 -2.51408 -2.577476 -2.387707 -2.443641 -2.504782 2.628565 -3.435093 2.506961 -3.464441 2.599624 -3.311783 -2.776188 -2.277432 -2.768371 -2.150804 -2.651432 -2.285366 2.93065 -3.354256 2.811673 -3.392994 2.892318 -3.234022 -2.306577 -2.742011 -2.315485 -2.615538 -2.181895 -2.732975 3.355194 -3.120939 3.241454 -3.173153 3.303346 -3.006396 1.823179 -3.117863 1.705038 -3.07681 1.864017 -2.999101 3.292745 -1.709794 3.170282 -1.73522 3.267687 -1.585532 3.491881 -1.496464 3.370718 -1.528105 3.461144 -1.371735 3.388292 -1.870526 3.264202 -1.886229 3.373238 -1.741095 3.126158 -2.371939 3.001317 -2.365292 3.132459 -2.240243 2.697006 -2.896916 2.57575 -2.864522 2.72737 -2.76755 2.198477 -3.322524 2.084754 -3.266526 2.250361 -3.199782 1.600672 -3.681809 1.498394 -3.603513 1.672535 -3.570375 3.974041 -2.523597 3.872528 -2.596835 3.901102 -2.421668 4.034837 -2.518073 3.932146 -2.589584 3.963567 -2.415035 4.106565 -2.499852 4.002992 -2.570013 4.036583 -2.396014 3.822596 -2.998786 3.710152 -3.053555 3.767995 -2.885998 4.147719 -2.629387 4.039742 -2.692755 4.084744 -2.520736 4.389388 -2.322593 4.284981 -2.392049 4.320654 -2.217089 4.428823 -2.367333 4.32255 -2.434053 4.363013 -2.259591 4.204215 -2.841272 4.090493 -2.893966 4.152328 -2.725779 3.789179 -3.448855 3.668443 -3.481773 3.756808 -3.326079 3.583628 -3.60333 3.460613 -3.625871 3.561444 -3.478337 3.58037 -3.592883 3.456762 -3.611632 3.561772 -3.46828 1.430531 -4.865767 1.314333 -4.819674 1.476607 -4.749526 -4.926092 1.147475 -4.826163 1.223255 -4.850996 1.046635 -4.861511 1.317953 -4.756097 1.387543 -4.794334 1.208752 -4.877791 1.200273 -4.771185 1.269211 -4.812522 1.087675 -4.849966 1.288156 -4.73986 1.350296 -4.790792 1.172533 -4.679595 1.817868 -4.562166 1.861817 -4.636754 1.697403 -4.961465 -0.1098259 -4.868574 -0.02531047 -4.877822 -0.2036854 -3.164716 -3.705156 -3.163387 -3.579248 -3.039723 -3.706495 -1.213079 -4.609443 -1.263977 -4.493844 -1.09891 -4.557907 -0.1404807 -4.668384 -0.2135519 -4.565293 -0.03906274 -4.594108 1.18126 -4.365387 1.084159 -4.283745 1.259977 -4.264678 1.486301 -4.085825 1.385252 -4.007309 1.559881 -3.977997 1.391009 -3.956976 1.292956 -3.872612 1.468537 -3.850279 1.020337 -3.949952 0.9309715 -3.854344 1.107737 -3.852195 1.446008 -3.371785 1.452555 -3.244779 1.577383 -3.25144 1.60864 -3.418336 1.609223 -3.290861 1.734222 -3.291457 0.6855946 -3.552579 0.7233838 -3.431795 0.8425349 -3.470103 1.764 -3.470637 1.759165 -3.343139 1.884071 -3.338204 -0.3807155 -3.55775 -0.3055579 -3.456717 -0.2056763 -3.532741 1.92685 -3.52183 1.916606 -3.394742 2.041186 -3.384292 -1.501107 -3.214511 -1.394764 -3.14805 -1.329067 -3.255631 2.112251 -3.561712 2.096094 -3.435519 2.220045 -3.41907 -2.194419 -2.761476 -2.074062 -2.727301 -2.04031 -2.849165 2.340178 -3.57292 2.316944 -3.448268 2.439766 -3.424687 -2.561065 -2.403951 -2.436466 -2.393806 -2.42646 -2.52014 2.642361 -3.5218 2.609836 -3.399763 2.73053 -3.366875 -2.786157 -2.129234 -2.661469 -2.138191 -2.670296 -2.264725 3.07444 -3.332206 3.028459 -3.215116 3.144694 -3.168796 -3.041978 -1.772744 -2.920885 -1.804008 -2.95189 -1.926113 3.728007 -2.798745 3.660589 -2.693044 3.76585 -2.625344 3.420857 0.9136659 3.315973 0.981741 3.383977 1.086735 3.608956 0.4304899 3.516807 0.5158174 3.601266 0.6089142 3.726221 -0.1248117 3.650464 -0.02317555 3.749892 0.05426432 3.661017 
-1.053959 3.615275 -0.9339188 3.731605 -0.8867179 3.308832 -1.995935 3.298605 -1.866033 3.423186 -1.85537 2.737876 -2.75988 2.761696 -2.630204 2.884405 -2.655376 2.12375 -3.259056 2.174382 -3.136199 2.288668 -3.190627 1.465443 -3.584622 1.538154 -3.47295 1.63983 -3.552808 0.9260219 -3.777084 1.013527 -3.677461 1.10279 -3.775122 4.598701 -1.319523 4.497409 -1.246121 4.570656 -1.144614 4.635749 -1.349225 4.536562 -1.273021 4.612633 -1.17366 4.697572 -1.302644 4.599239 -1.225371 4.676413 -1.126912 4.818449 -1.015895 4.717157 -0.9425847 4.790402 -0.8412021 4.927542 0.6945682 4.808154 0.7316663 4.845188 0.8512621 5.028407 0.1650747 4.914817 0.2174538 4.966995 0.3314817 5.009452 -0.8866826 4.910847 -0.8093831 4.987672 -0.7101688 4.65228 -2.185447 4.579516 -2.083001 4.681155 -2.009659 4.001395 -3.301996 3.958797 -3.183328 4.076315 -3.140313 3.478617 -3.753878 3.456568 -3.628836 3.579608 -3.606428 3.309935 -3.849226 3.296665 -3.722975 3.420959 -3.709496 2.665688 -4.303628 2.675536 -4.178029 2.800148 -4.187955 -2.902448 -4.140808 -2.782991 -4.103867 -2.746182 -4.223756 -5.049008 -0.2713375 -4.948746 -0.3472029 -5.023396 -0.4490969 -4.966664 0.8684676 -4.889886 0.7657902 -4.988527 0.68587 -4.781863 1.513432 -4.723902 1.396903 -4.834652 1.335918 -4.429661 2.29244 -4.396198 2.166938 -4.516636 2.132068 -3.987902 3.002013 -3.978715 2.874487 -4.103377 2.865089 -4.868499 0.9389896 -4.806466 0.8296379 -4.914988 0.7671308 -3.171433 -3.704432 -3.046443 -3.705996 -3.047999 -3.831655 -0.8016064 -4.710663 -0.692333 -4.649428 -0.6316344 -4.759667 0.09427359 -4.677382 0.1919373 -4.598234 0.2699552 -4.697312 0.8771338 -4.414552 0.9621822 -4.31913 1.053789 -4.407721 1.068401 -4.160354 1.151246 -4.059469 1.24485 -4.148757 0.9192985 -4.008566 1.006975 -3.910037 1.09607 -4.006998 0.5407949 -3.9415 0.6374622 -3.852895 0.7167114 -3.960974 2.081483 -3.035334 1.957714 -3.053144 2.063984 -2.909369 1.545374 -3.286476 1.420614 -3.278639 1.553115 -3.160156 2.167985 -3.107288 2.04462 -3.127849 2.147833 -2.981415 0.5411024 -3.551154 0.425555 -3.503058 0.5887868 -3.434609 2.266087 -3.17764 2.143234 -3.201204 2.243018 -3.052154 -0.9350355 -3.435821 -1.017502 -3.341189 -0.841098 -3.352744 2.383002 -3.240767 2.260843 -3.267819 2.356505 -3.116051 -2.06591 -2.868237 -2.105015 -2.748402 -1.947184 -2.828766 2.528656 -3.287248 2.4075 -3.318592 2.497893 -3.163801 -2.542844 -2.429957 -2.553768 -2.303978 -2.418322 -2.418905 2.718474 -3.30019 2.598881 -3.337129 2.682106 -3.178718 -2.791048 -2.125627 -2.781942 -1.999224 -2.66638 -2.13486 2.978388 -3.246582 2.86146 -3.291298 2.934196 -3.128268 -2.915861 -1.950466 -2.892951 -1.825711 -2.792979 -1.973726 3.35374 -3.053295 3.24193 -3.109627 3.297851 -2.940596 -1.816276 -3.002492 -1.840701 -2.878903 -1.693685 -2.977868 3.915415 -2.531057 3.815132 -2.606021 3.840794 -2.430312 3.470572 -0.7505899 3.359109 -0.8073711 3.413996 -0.6387228 3.650721 0.0001840697 3.551881 -0.07747669 3.574198 0.1004931 3.74253 -0.2012897 3.637998 -0.271687 3.673988 -0.09392767 3.716725 -0.937858 3.599713 -0.9833421 3.672757 -0.8168115 3.427294 -1.853161 3.302735 -1.864074 3.416808 -1.723537 2.841703 -2.718257 2.719361 -2.691356 2.867343 -2.589899 2.103663 -3.346959 1.992183 -3.286992 2.160207 -3.228729 1.231169 -3.763516 1.137406 -3.6745 1.313834 -3.662548 0.5371031 -3.956896 0.4601137 -3.849293 0.6355797 -3.872772 4.644667 -1.147579 4.575244 -1.251774 4.540718 -1.077992 4.725056 -0.9928289 4.656756 -1.097746 4.620366 -0.9243808 4.817759 -0.7447974 4.752617 -0.8516751 4.711075 -0.6795369 4.912801 -0.3385599 4.854308 
-0.449184 4.802331 -0.2799857 4.972114 0.2083931 4.923534 0.09310298 4.85694 0.2570224 4.78154 -1.563215 4.693633 -1.652203 4.692673 -1.475188 4.533847 -2.301651 4.431678 -2.373921 4.461829 -2.199125 4.297452 -2.814432 4.186618 -2.872617 4.239653 -2.702856 3.92258 -3.393969 3.804101 -3.434201 3.882732 -3.274349 3.570187 -3.83446 3.447558 -3.858974 3.545955 -3.710406 3.702032 -3.637387 3.579626 -3.663119 3.676697 -3.51306 4.135906 -3.107161 4.016652 -3.145082 4.09844 -2.986458 4.377238 -2.741501 4.260242 -2.785688 4.333228 -2.624033 -4.090959 3.135936 -3.969385 3.165031 -4.061896 3.014229 -4.520604 2.413743 -4.403561 2.458471 -4.476719 2.294452 -4.626987 2.127445 -4.510605 2.175236 -4.581377 2.005497 -4.563308 2.203534 -4.444352 2.244228 -4.524908 2.07747 -4.266204 2.702052 -4.142846 2.72322 -4.246008 2.57275 -3.516044 3.607025 -3.391677 3.594046 -3.528611 3.478583 -4.188736 2.783136 -4.064399 2.796145 -4.175879 2.657332 -4.787176 -1.371821 -4.715127 -1.269385 -4.685028 -1.444073 -0.9494212 -4.792238 -1.005344 -4.679698 -0.8376283 -4.735941 0.0845071 -4.792902 0.008279456 -4.692771 0.1835745 -4.715856 1.047917 -4.569306 0.9548734 -4.484181 1.131391 -4.474423 1.248504 -4.344524 1.153332 -4.259775 1.329543 -4.244995 1.017082 -4.231321 0.9278189 -4.137694 1.104587 -4.135812 0.5374741 -4.174804 0.4600088 -4.068322 0.6355767 -4.090722 0.07230307 -4.087889 0.00780003 -3.970412 0.1793748 -4.017118 1.83883 -3.119177 1.825987 -2.992501 1.950325 -2.979416 1.933746 -3.190617 1.917818 -3.063985 2.041799 -3.047717 1.256505 -3.336413 1.270296 -3.210433 1.394533 -3.224418 2.038563 -3.262318 2.019379 -3.136032 2.142898 -3.116418 0.2932019 -3.507842 0.3451442 -3.392961 0.4588411 -3.445444 2.16154 -3.328571 2.138642 -3.203013 2.261527 -3.179617 -0.9949171 -3.339419 -0.9015308 -3.255566 -0.8184405 -3.349809 2.313587 -3.380006 2.286164 -3.255667 2.408119 -3.227707 -2.055605 -2.781768 -1.937583 -2.740174 -1.896403 -2.859379 2.511454 -3.399877 2.478152 -3.277448 2.598634 -3.243607 -2.538773 -2.319327 -2.414323 -2.307475 -2.402609 -2.433403 2.783397 -3.35525 2.741932 -3.235899 2.859855 -3.193933 -2.797872 -1.978285 -2.673279 -1.988502 -2.683358 -2.114806 3.1796 -3.172092 3.125926 -3.058293 3.238816 -3.004187 -2.941831 -1.750586 -2.819601 -1.777142 -2.845772 -1.90117 3.781216 -2.654572 3.70789 -2.55287 3.809124 -2.479205 -3.383726 -0.6547773 -3.28179 -0.7273126 -3.354138 -0.8295121 4.578078 -1.241391 4.473972 -1.17204 4.543159 -1.067687 2.595565 2.281662 2.471798 2.299264 2.48931 2.423664 3.187073 1.562782 3.073303 1.615427 3.125086 1.731094 3.512427 0.9948704 3.411466 1.070531 3.485167 1.174176 3.74994 0.009575338 3.675507 0.11294 3.775929 0.1895542 3.584358 -1.328918 3.553445 -1.203899 3.674563 -1.17199 2.895049 -2.545332 2.912915 -2.416517 3.036632 -2.435119 2.011423 -3.283355 2.067357 -3.165014 2.179144 -3.224228 1.12443 -3.647706 1.207398 -3.546257 1.300893 -3.636283 0.5032316 -3.784059 0.6009201 -3.697544 0.6789071 -3.805915 0.1208535 -3.886631 0.2262219 -3.811318 0.2934714 -3.92932 4.536595 1.744027 4.41178 1.737219 4.404983 1.862239 4.56296 1.787211 4.438043 1.782666 4.433505 1.907774 4.524978 1.990364 4.400156 1.983682 4.393483 2.108669 4.41338 2.32634 4.289012 2.313776 4.276462 2.438281 4.238841 2.721474 4.115501 2.701158 4.095202 2.824608 4.101525 3.011407 3.979093 2.986184 3.953888 3.108704 4.393779 2.670161 4.26933 2.658433 4.257615 2.783029 5.15639 0.664309 5.0388 0.7067752 5.081197 0.8245558 4.873455 -1.967539 4.793822 -1.870955 4.890173 -1.791129 4.048731 -3.434964 4.005972 -3.317064 4.123431 
-3.274145 3.725282 -3.68679 3.696077 -3.563811 3.817617 -3.53426 3.913163 -3.415308 3.880679 -3.292654 4.001384 -3.259646 3.816514 -3.484526 3.790579 -3.360831 3.912859 -3.334596 2.571479 -4.467304 2.587474 -4.342887 2.711446 -4.358939 -5.127498 -0.5109041 -5.023015 -0.5797551 -5.091631 -0.6845952 -4.909489 1.501073 -4.843679 1.392328 -4.949953 1.324989 -4.583602 2.2132 -4.540236 2.090437 -4.657472 2.045026 -4.128621 2.878451 -4.109835 2.748014 -4.233415 2.728186 -3.444968 3.614007 -3.455396 3.483554 -3.57996 3.494474 -3.373506 3.730601 -3.390714 3.602697 -3.514524 3.620473 -4.041584 2.987236 -4.034968 2.86095 -4.159793 2.854257 -4.885186 -0.9642667 -4.789472 -1.044847 -4.86987 -1.140779 -0.4874289 -4.867066 -0.3814674 -4.800439 -0.3151563 -4.906905 0.5386393 -4.771795 0.6300845 -4.68584 0.7153064 -4.778072 0.7959873 -4.609936 0.8843664 -4.519603 0.972764 -4.609917 0.9080145 -4.396231 0.9959373 -4.302727 1.084789 -4.395254 0.6774921 -4.247064 0.7714337 -4.157879 0.8538957 -4.25948 0.258116 -4.14245 0.3608095 -4.064041 0.4320769 -4.177025 -0.09264404 -4.041572 0.01672921 -3.974412 0.07724774 -4.095788 2.332037 -2.782437 2.211406 -2.815848 2.29928 -2.659397 2.00012 -2.965646 1.876008 -2.980727 1.985248 -2.839789 2.364858 -2.897041 2.244227 -2.930541 2.332098 -2.773688 1.296749 -3.286557 1.172972 -3.268958 1.314194 -3.161692 2.419851 -3.000288 2.299459 -3.034707 2.386226 -2.877058 -0.1028279 -3.49586 -0.2068573 -3.426151 -0.03352503 -3.391221 2.499836 -3.09061 2.379945 -3.12679 2.464464 -2.96798 -1.773289 -2.981237 -1.825633 -2.867019 -1.659776 -2.928568 2.611705 -3.161404 2.492676 -3.200361 2.573532 -3.039933 -2.50574 -2.363729 -2.519172 -2.238328 -2.381464 -2.350176 2.768394 -3.198356 2.650812 -3.241494 2.725975 -3.078779 -2.810363 -1.966612 -2.799515 -1.840606 -2.685835 -1.977588 2.993462 -3.171836 2.878413 -3.221333 2.944587 -3.055324 -2.947599 -1.742323 -2.921036 -1.618434 -2.825454 -1.769266 3.330341 -3.01474 3.220179 -3.07431 3.27127 -2.903646 -2.9064 -1.810278 -2.876347 -1.687133 -2.785067 -1.84078 3.853258 -2.5493 3.754086 -2.625757 3.777167 -2.44965 -1.570707 -3.068698 -1.600059 -2.946972 -1.449202 -3.039293 4.577734 -1.242658 4.508519 -1.346991 4.473647 -1.173278 2.805446 2.051322 2.770713 1.930592 2.685369 2.086244 3.10807 1.761021 3.057395 1.644866 2.993803 1.812534 3.395863 1.407971 3.329323 1.299589 3.290045 1.476123 3.743316 0.5427299 3.650572 0.4569812 3.65951 0.6376245 3.746828 -0.962922 3.627917 -1.002263 3.70829 -0.8415321 2.922667 -2.610596 2.799832 -2.586856 2.945835 -2.484727 1.690081 -3.554859 1.58689 -3.481686 1.760626 -3.447825 0.59192 -3.877937 0.5141446 -3.774028 0.689777 -3.795352 -0.03587545 -3.932201 -0.0965947 -3.813291 0.0733865 -3.86612 -0.3712601 -3.985617 -0.4219445 -3.85961 -0.2569969 -3.929723 4.531161 1.75846 4.538344 1.633483 4.406368 1.751266 4.564114 1.784141 4.568572 1.659023 4.439194 1.779676 4.533367 1.970657 4.539513 1.845613 4.408518 1.964502 4.442266 2.269944 4.453255 2.145249 4.31775 2.25894 4.303713 2.616916 4.321012 2.492969 4.179916 2.599596 4.173148 2.91086 4.195387 2.78774 4.050142 2.8886 4.286704 2.840209 4.30324 2.716234 4.162803 2.823663 5.100229 -1.008992 5.022626 -1.107033 5.002235 -0.9313516 3.912088 -3.506582 3.794397 -3.548765 3.869973 -3.388699 3.494883 -3.994402 3.372395 -4.019424 3.469946 -3.871492 3.313563 -4.218364 3.18957 -4.234272 3.297733 -4.093761 3.9501 -3.549527 3.829435 -3.582534 3.917467 -3.427481 4.589124 -2.617139 4.476258 -2.671597 4.5354 -2.502731 4.94736 -1.805513 4.842963 -1.874845 4.878612 -1.700231 5.033536 
1.500261 4.987381 1.383909 4.917369 1.546489 -3.179207 4.139395 -3.054351 4.133355 -3.185204 4.013637 -4.026819 3.235127 -3.90315 3.253943 -4.008622 3.107251 -4.120724 3.028958 -3.99699 3.047777 -4.102976 2.897756 -3.86443 3.280619 -3.739494 3.284902 -3.860417 3.147282 -3.211514 3.872102 -3.088397 3.849206 -3.233126 3.741669 -3.01228 4.065356 -2.891374 4.032137 -3.04401 3.938776 -3.356679 3.812427 -3.233681 3.789705 -3.378959 3.686987 -4.273462 2.716779 -4.149005 2.728456 -4.261829 2.591848 -1.299828 -4.828931 -1.346253 -4.712693 -1.183769 -4.782435 0.5949966 -4.875676 0.5103162 -4.783149 0.6869434 -4.790461 0.8846592 -4.747783 0.7962564 -4.658596 0.9730331 -4.658567 1.010278 -4.600761 0.9211752 -4.511143 1.097946 -4.509676 0.647323 -4.505241 0.5671534 -4.405098 0.7432283 -4.421529 0.1143088 -4.396135 0.04786968 -4.283402 0.2201901 -4.325397 -0.219919 -4.246653 -0.2759106 -4.125371 -0.1081606 -4.18589 -0.5251554 -4.113745 -0.5706053 -3.985961 -0.408711 -4.063869 2.142869 -2.864021 2.112949 -2.740164 2.234315 -2.70963 2.186159 -2.970247 2.155709 -2.846215 2.276944 -2.815063 1.745827 -3.052136 1.736201 -2.925631 1.86083 -2.91586 2.251066 -3.067598 2.2193 -2.943801 2.340197 -2.911273 1.005981 -3.316824 1.029573 -3.192831 1.152327 -3.216662 2.341465 -3.153371 2.307528 -3.030281 2.427833 -2.995558 -0.2780108 -3.414046 -0.2065862 -3.310715 -0.1040019 -3.38266 2.465416 -3.219975 2.428234 -3.098163 2.547575 -3.060211 -1.733906 -2.916793 -1.622085 -2.860512 -1.566219 -2.973164 2.63748 -3.251789 2.595536 -3.132032 2.713289 -3.089374 -2.505388 -2.252811 -2.381195 -2.238502 -2.367016 -2.363835 2.883807 -3.216672 2.834741 -3.100244 2.949709 -3.050555 -2.813807 -1.819937 -2.689364 -1.83187 -2.70116 -1.957771 3.251957 -3.041332 3.191753 -2.930865 3.301299 -2.870155 -2.962929 -1.544953 -2.841507 -1.575058 -2.871201 -1.69816 3.819304 -2.530785 3.740737 -2.433097 3.837959 -2.354154 -3.068667 -1.327428 -2.951979 -1.372837 -2.9968 -1.491057 4.569934 -1.121556 4.462533 -1.057449 4.526485 -0.949787 -2.537423 2.209998 -2.535324 2.084896 -2.660307 2.082795 4.447674 1.864803 4.323418 1.851163 4.3098 1.975625 1.814516 2.832044 1.690318 2.817757 1.676182 2.943287 2.478491 2.404864 2.354647 2.422209 2.371608 2.548852 2.89984 2.062201 2.780782 2.101389 2.818862 2.22391 3.394141 1.411951 3.288242 1.479977 3.354653 1.588452 3.774291 -0.1549808 3.709279 -0.04634438 3.816042 0.01980795 3.05427 -2.318842 3.065069 -2.19194 3.189602 -2.202944 1.614426 -3.481116 1.684192 -3.37382 1.787911 -3.445991 0.5235097 -3.763001 0.621187 -3.679981 0.6991879 -3.783943 -0.04707147 -3.786173 0.06146717 -3.718234 0.1234701 -3.837164 -0.3327117 -3.825186 -0.2195264 -3.766077 -0.1664784 -3.892195 -0.4459059 -3.950542 -0.3307993 -3.896406 -0.2820604 -4.024259 3.49791 3.485986 3.388408 3.425585 3.328122 3.535295 3.629385 3.402788 3.516755 3.348467 3.462539 3.461316 3.724543 3.358029 3.609669 3.308655 3.560385 3.423741 3.793903 3.344404 3.677387 3.299064 3.632121 3.415772 3.843638 3.356758 3.725908 3.314695 3.6839 3.432583 3.876614 3.392288 3.757996 3.352819 3.718566 3.471555 3.891897 3.451684 3.772675 3.414097 3.735112 3.533396 3.880067 3.544171 3.760575 3.50746 3.723876 3.62699 4.095215 3.380488 3.973409 3.352407 3.945334 3.474237 5.195161 1.349913 5.073459 1.378442 5.101984 1.500161 4.803335 -2.525421 4.734646 -2.420958 4.839082 -2.352251 4.276945 -3.24643 4.23031 -3.129804 4.346285 -3.082906 4.554451 -2.748257 4.499746 -2.634382 4.612139 -2.578955 4.59661 -2.604256 4.542568 -2.489998 4.655282 -2.435215 4.658174 -2.448717 4.604001 -2.335245 
4.716651 -2.280676 4.234321 -3.103419 4.20091 -2.982939 4.321362 -2.94952 -4.429235 2.778442 -4.391322 2.658385 -4.510434 2.620171 -4.169002 3.059561 -4.145389 2.932573 -4.268138 2.908144 -3.75137 3.434452 -3.747087 3.302125 -3.872014 3.297588 -3.12085 3.90814 -3.140875 3.776254 -3.26426 3.797658 -2.998592 4.016152 -3.02642 3.886902 -3.148283 3.916417 -3.067961 4.028651 -3.098108 3.901684 -3.219418 3.933237 -3.393726 3.78123 -3.414835 3.655587 -3.538039 3.677114 -4.30094 2.673561 -4.288049 2.548754 -4.412383 2.535814 -1.292217 -4.830996 -1.176232 -4.784317 -1.129624 -4.900481 0.9497848 -4.822634 1.035316 -4.730981 1.126472 -4.816978 0.9694703 -4.732588 1.056257 -4.641826 1.146219 -4.729384 0.7838487 -4.633955 0.8757065 -4.547097 0.9604835 -4.641209 0.4713842 -4.508581 0.5702328 -4.428366 0.646744 -4.532 0.05763673 -4.387311 0.1643196 -4.317788 0.2294638 -4.431642 -0.154444 -4.266059 -0.04349642 -4.203828 0.0140852 -4.323734 -0.402793 -4.168838 -0.2876258 -4.116079 -0.2390305 -4.241113 2.499174 -2.569348 2.382578 -2.615409 2.454114 -2.450161 2.322329 -2.653326 2.201978 -2.687645 2.288555 -2.531033 2.496318 -2.723738 2.378966 -2.767849 2.453266 -2.603498 1.915779 -2.902275 1.791349 -2.91432 1.903851 -2.776622 2.519838 -2.85828 2.402202 -2.901621 2.477568 -2.737666 0.9037573 -3.316375 0.7831151 -3.283484 0.9364753 -3.195097 2.570881 -2.97491 2.453367 -3.018551 2.528274 -2.854547 -1.093169 -3.224046 -1.170671 -3.125593 -0.995095 -3.146244 2.654846 -3.069568 2.5379 -3.114663 2.610703 -2.950103 -2.443659 -2.329677 -2.46112 -2.205152 -2.319884 -2.31211 2.783273 -3.130281 2.667522 -3.1783 2.736085 -3.012491 -2.836803 -1.792781 -2.823462 -1.667355 -2.712517 -1.806244 2.978535 -3.130291 2.865064 -3.18341 2.926099 -3.015341 -2.984925 -1.507383 -2.953534 -1.38489 -2.863931 -1.539164 3.284234 -3.006709 3.175332 -3.068593 3.222872 -2.896881 -3.03845 -1.389129 -2.996161 -1.269767 -2.920821 -1.432042 3.782473 -2.585313 3.684133 -2.662838 3.705309 -2.486513 -2.855268 -1.740156 -2.820588 -1.618574 -2.735175 -1.775266 4.524279 -1.293313 4.456311 -1.398444 4.419372 -1.2252 -2.716482 2.008036 -2.591604 2.013569 -2.710949 1.883141 4.417722 1.935465 4.433291 1.811281 4.293695 1.919876 1.262051 3.138853 1.292801 3.016328 1.140892 3.107756 1.854445 2.934874 1.860666 2.807251 1.7296 2.928514 2.190172 2.82773 2.182073 2.699684 2.065435 2.836044 2.688266 2.546638 2.658422 2.422999 2.566881 2.577036 3.558782 1.391916 3.483151 1.291644 3.459258 1.468116 2.987127 -2.510678 2.863936 -2.489383 3.008316 -2.386874 0.4516358 -3.907483 0.3778495 -3.804906 0.5525347 -3.83247 -0.507015 -3.885345 -0.5535418 -3.763988 -0.3909966 -3.836677 -0.8309422 -3.82475 -0.8670662 -3.695585 -0.7112757 -3.785759 -0.9388004 -3.850545 -0.9708728 -3.717536 -0.817985 -3.815235 -0.917179 -3.981796 -0.948963 -3.848711 -0.7962874 -3.946807 3.294036 3.679863 3.360435 3.57379 3.18813 3.613359 3.505441 3.530832 3.563638 3.42002 3.394815 3.472538 3.66019 3.428365 3.711638 3.314251 3.546269 3.37683 3.777635 3.362853 3.823464 3.246371 3.661339 3.316951 3.870405 3.325723 3.911469 3.207495 3.752343 3.284601 3.947335 3.30941 3.984256 3.189852 3.827912 3.272447 4.015659 3.306452 4.048835 3.185836 3.895142 3.273249 4.083687 3.307059 4.113229 3.185539 3.962228 3.277502 4.168099 3.290095 4.193506 3.167675 4.045708 3.264681 5.313875 0.7583863 5.27187 0.6406474 5.196144 0.8003939 2.95167 -4.553367 2.826978 -4.562135 2.942903 -4.428652 2.75 -4.747616 2.625 -4.747616 2.75 -4.622553 4.43321 -3.144293 4.317833 -3.192657 4.385114 -3.028275 5.062372 -1.875477 4.960869 
-1.949313 4.98942 -1.772746 5.290864 -0.9343838 5.201288 -1.022633 5.203678 -0.8437153 5.302175 0.7210039 5.240204 0.6116887 5.193617 0.7834068 1.526691 5.091992 1.596708 4.987934 1.423141 5.021632 -2.544878 4.588917 -2.422878 4.561106 -2.5721 4.464276 -3.386216 3.904489 -3.261441 3.896597 -3.393708 3.773065 -3.374073 3.814304 -3.249646 3.80142 -3.386031 3.680234 -2.942565 4.075252 -2.820883 4.044233 -2.971172 3.943308 -2.779671 4.189639 -2.660314 4.149587 -2.816805 4.060902 -2.850088 4.222532 -2.731467 4.180692 -2.88951 4.096636 -2.964623 4.180759 -2.845341 4.142244 -3.001996 4.05783 -3.204494 4.00974 -3.083527 3.977962 -3.23599 3.887692 -4.959256 1.188358 -4.843779 1.236227 -4.911401 1.07285 1.099472 -4.903842 1.007303 -4.819159 1.183911 -4.811407 1.084632 -4.826833 0.9942193 -4.740054 1.170949 -4.735937 0.7103005 -4.81822 0.6285828 -4.722922 0.8048903 -4.735891 0.1737234 -4.754745 0.1051276 -4.648261 0.2782204 -4.684844 -0.269811 -4.608163 -0.3259408 -4.492217 -0.1581219 -4.549894 -0.3592211 -4.45569 -0.4105129 -4.33496 -0.2452292 -4.401366 -0.5162208 -4.326095 -0.5607477 -4.200753 -0.3994203 -4.278312 -0.7515728 -4.213529 -0.7869291 -4.084128 -0.6316773 -4.17537 2.362468 -2.6322 2.318276 -2.512658 2.435203 -2.467477 2.371256 -2.773911 2.328521 -2.653546 2.445989 -2.609756 2.120875 -2.747567 2.090614 -2.624261 2.211896 -2.593495 2.407225 -2.897799 2.364757 -2.777262 2.482322 -2.73372 1.643783 -2.994079 1.637966 -2.867878 1.762831 -2.861999 2.472438 -3.004141 2.429107 -2.884065 2.546356 -2.839689 0.6203719 -3.31223 0.6589297 -3.192576 0.7778343 -3.231377 2.573573 -3.087157 2.528117 -2.968231 2.644559 -2.921805 -1.129861 -3.135001 -1.032796 -3.055856 -0.9540351 -3.153395 2.724099 -3.132342 2.674877 -3.015446 2.789778 -2.96537 -2.433725 -2.233887 -2.310172 -2.214805 -2.291209 -2.339132 2.949376 -3.108055 2.893953 -2.994579 3.005995 -2.938447 -2.836229 -1.646601 -2.712046 -1.661006 -2.726321 -1.786313 3.296958 -2.939979 3.231268 -2.832746 3.337615 -2.766508 -2.988512 -1.311967 -2.868345 -1.346804 -2.902766 -1.468418 3.846344 -2.424586 3.763088 -2.330916 3.856327 -2.247275 -3.051985 -1.13739 -2.936367 -1.185577 -2.983881 -1.302833 4.572828 -0.951213 4.461706 -0.8938386 4.518951 -0.782466 -3.187415 -0.7132922 -3.083209 -0.7830137 -3.152246 -0.8882519 4.281828 2.142778 4.15935 2.117754 4.134366 2.240424 -0.8079266 3.178507 -0.8803022 3.076317 -0.9822177 3.148888 3.312835 3.616897 3.207815 3.548984 3.140022 3.65419 0.9707499 3.119042 0.8534359 3.075165 0.81028 3.194439 1.554003 2.966253 1.430486 2.946508 1.411292 3.073568 1.880299 2.895858 1.755404 2.89056 1.750273 3.019518 2.200448 2.825443 2.07574 2.834194 2.084287 2.961875 2.953971 2.250262 2.83662 2.293702 2.879674 2.412106 3.426462 -1.660101 3.414501 -1.535328 3.538927 -1.523333 0.3295461 -3.820678 0.4313495 -3.747108 0.5038828 -3.850366 -0.5988345 -3.767377 -0.4822955 -3.720194 -0.4370887 -3.841828 -0.8886076 -3.696915 -0.7687506 -3.658665 -0.7332642 -3.787857 -0.9755731 -3.717881 -0.8547213 -3.682735 -0.8227862 -3.815737 -0.9417398 -3.848152 -0.8209023 -3.812919 -0.7889133 -3.946012 -0.7708847 -4.082885 -0.6511188 -4.044217 -0.615326 -4.173602 2.966948 4.054641 2.870488 3.97499 2.790987 4.071633 3.197641 3.918054 3.095377 3.846031 3.023495 3.948498 3.368152 3.820772 3.261653 3.755202 3.196209 3.861907 3.495284 3.758425 3.385647 3.698276 3.325607 3.80811 3.590127 3.72648 3.478139 3.670863 3.422608 3.783026 3.659481 3.721418 3.545723 3.669546 3.493916 3.783447 3.706337 3.741685 3.591269 3.69281 3.542438 3.807981 3.728466 3.789767 3.612512 
3.743052 3.565824 3.85907 3.709989 3.88016 3.593701 3.8343 3.547853 3.950619 3.558225 4.091803 3.443176 4.042923 3.3943 4.157979 2.116609 5.053267 2.021027 4.972713 1.940472 5.068295 -2.625 4.875 -2.625 4.75 -2.75 4.75 5.436717 -0.7520695 5.338155 -0.6750876 5.415034 -0.5763947 5.258096 -1.403639 5.172768 -1.311483 5.264114 -1.225399 5.210914 -1.422653 5.129267 -1.326687 5.223917 -1.243904 5.259198 -1.094664 5.17479 -1.001404 5.266988 -0.9160237 5.347363 0.1832951 5.245634 0.2563028 5.318273 0.3585487 0.005288984 5.323682 -0.07396171 5.226766 -0.1706278 5.306221 -3.097023 4.252597 -3.109104 4.125592 -3.233519 4.137924 -3.153266 4.07578 -3.167701 3.944936 -3.291865 3.960148 -2.770389 4.2004 -2.799585 4.069003 -2.921128 4.100566 -2.694326 4.209573 -2.729607 4.079254 -2.849524 4.117594 -2.864532 4.143679 -2.89941 4.014312 -3.019446 4.051902 -3.046946 4.105807 -3.081071 3.978372 -3.201323 4.014535 -3.215209 4.007949 -3.245479 3.883081 -3.366759 3.914246 -3.62728 3.642348 -3.645438 3.517617 -3.769112 3.535931 -5.001613 -1.004055 -4.909703 -1.088819 -4.994423 -1.180776 1.000711 -4.924551 1.086984 -4.833825 1.177438 -4.920357 0.9150608 -4.860253 1.004482 -4.772408 1.091825 -4.862342 0.5095473 -4.840365 0.6074369 -4.761987 0.6851712 -4.860687 0.2363468 -4.755025 0.3399436 -4.683798 0.4098915 -4.789289 -0.01006889 -4.643496 0.09839061 -4.579416 0.1605319 -4.69126 0.008064921 -4.530914 0.117699 -4.46826 0.1777437 -4.582659 -0.1114526 -4.445169 0.00101216 -4.387922 0.05557118 -4.505928 -0.3323947 -4.385092 -0.2162599 -4.336679 -0.1700247 -4.458283 2.600974 -2.404413 2.48858 -2.460441 2.546268 -2.289302 2.531179 -2.386292 2.416029 -2.435827 2.482544 -2.269011 2.574911 -2.592156 2.460897 -2.644739 2.52367 -2.475159 2.341964 -2.499594 2.222338 -2.536274 2.305706 -2.378574 2.575145 -2.754308 2.460221 -2.804777 2.525975 -2.636346 1.812747 -2.853253 1.688001 -2.861259 1.804787 -2.727792 2.602756 -2.895106 2.487479 -2.944644 2.554424 -2.776955 0.1309308 -3.340188 0.02170847 -3.279243 0.1917213 -3.230689 2.662609 -3.012464 2.547513 -3.062291 2.613846 -2.894855 -2.279532 -2.401976 -2.30656 -2.279556 -2.157489 -2.374865 2.765535 -3.09645 2.651288 -3.148061 2.714814 -2.980196 -2.87541 -1.588302 -2.858197 -1.463721 -2.751601 -1.605624 2.933486 -3.122885 2.821166 -3.178441 2.878629 -3.009135 -3.028442 -1.226429 -2.99037 -1.106195 -2.909381 -1.264876 3.211907 -3.032068 3.103744 -3.095228 3.149252 -2.923034 -3.069787 -1.092818 -3.020497 -0.9764338 -2.954916 -1.142758 3.696433 -2.646882 3.598451 -2.724825 3.618816 -2.548489 -3.073593 -1.083516 -3.017791 -0.9699703 -2.961739 -1.140161 4.486008 -1.300364 4.420366 -1.406924 4.37963 -1.234609 -3.027515 -1.2415 -2.970841 -1.129119 -2.916101 -1.298665 4.172431 2.349671 4.203333 2.228426 4.051311 2.318737 -1.066207 3.121132 -0.9623904 3.051308 -1.135828 3.017014 2.995575 3.88441 3.071974 3.785336 2.896639 3.807905 0.08745519 3.280905 0.1551343 3.173628 -0.01763782 3.211819 0.554948 3.312205 0.6054049 3.194024 0.4405841 3.260064 0.6846689 3.391332 0.7291518 3.27018 0.5678516 3.345198 0.6602071 3.534377 0.7040371 3.41415 0.5431433 3.489363 0.3127428 3.732486 0.365966 3.618292 0.1996398 3.67875 -2.986517 2.453376 -2.863521 2.431066 -3.008809 2.330281 -2.455983 -3.062512 -2.43421 -2.938217 -2.332894 -3.084498 -1.960816 -3.404782 -1.957839 -3.275053 -1.835852 -3.407873 -1.738954 -3.518669 -1.743448 -3.384767 -1.614034 -3.513851 -1.554507 -3.643437 -1.564646 -3.507031 -1.429919 -3.632336 -1.334172 -3.839818 -1.350566 -3.703919 -1.210252 -3.821838 -0.9974258 -4.121725 -1.022833 
-3.989906 -0.8750351 -4.094361 2.563262 4.321872 2.65165 4.233348 2.474874 4.233348 2.917886 4.131255 2.996678 4.034055 2.820846 4.052335 3.184261 3.975848 3.254642 3.872372 3.080958 3.90535 3.387585 3.85613 3.450696 3.748056 3.279687 3.792916 3.546503 3.768171 3.603331 3.656669 3.435168 3.711259 3.674635 3.706405 3.725979 3.592291 3.560667 3.654996 3.782464 3.664494 3.828922 3.548333 3.666418 3.61799 3.879487 3.6347 3.921425 3.516865 3.761732 3.592733 3.978138 3.604394 4.015549 3.485077 3.858867 3.566968 4.108058 3.539189 4.139939 3.418303 3.987192 3.507304 4.440319 3.209146 4.459243 3.085583 4.316759 3.190221 -2.625 4.875 -2.5 4.875 -2.625 4.75 -2.75 4.875 5.456391 -1.02599 5.372993 -1.119223 5.36328 -0.9424822 5.488408 -0.5101355 5.410837 -0.6090332 5.390389 -0.4318685 5.477311 0.1279323 5.4083 0.02220869 5.373088 0.1979364 5.295116 1.286382 5.246451 1.169704 5.179978 1.335698 4.189762 3.429734 4.191969 3.303475 4.064782 3.427505 0.232797 5.338625 0.3245422 5.25232 0.1478982 5.24536 -2.103239 4.795662 -1.985231 4.752627 -2.144458 4.672455 -2.680049 4.374568 -2.5589 4.341366 -2.710837 4.243921 -2.580112 4.324236 -2.460802 4.283158 -2.617396 4.192785 -2.551952 4.308883 -2.434198 4.262456 -2.59389 4.178523 -2.745857 4.255183 -2.627798 4.210232 -2.786931 4.125983 -2.917392 4.246264 -2.799223 4.202516 -2.958147 4.119415 -2.940841 4.266335 -2.82283 4.223419 -2.982053 4.143448 -2.982273 4.260023 -2.86426 4.218154 -3.02348 4.140113 -3.224494 4.063218 -3.104628 4.027682 -3.25995 3.943082 1.233196 -4.983106 1.140327 -4.899386 1.316864 -4.890179 0.9766684 -4.964646 0.8897899 -4.874524 1.066541 -4.877527 0.3827951 -4.972155 0.3091519 -4.870698 0.4837985 -4.898181 -0.3845066 -4.900603 -0.4392893 -4.787541 -0.2721507 -4.845476 -0.567757 -4.787729 -0.6155936 -4.670541 -0.4522725 -4.739187 -0.2729946 -4.697386 -0.3251147 -4.580536 -0.1593791 -4.643782 -0.2459981 -4.594118 -0.2960366 -4.475002 -0.1314505 -4.542084 -0.3081565 -4.509205 -0.3538463 -4.38749 -0.191806 -4.461409 -0.3547072 -4.453703 -0.3962821 -4.330428 -0.2368236 -4.410227 2.511208 -2.439031 2.4555 -2.324441 2.5674 -2.267395 2.499007 -2.611572 2.446169 -2.495354 2.559453 -2.441148 2.382959 -2.466077 2.335837 -2.348135 2.451615 -2.300131 2.514909 -2.759659 2.463557 -2.642725 2.577521 -2.590034 2.12447 -2.614343 2.092683 -2.491984 2.213574 -2.459811 2.560965 -2.885543 2.509816 -2.768698 2.623873 -2.716298 1.510496 -2.953856 1.510058 -2.828056 1.635057 -2.827615 2.643579 -2.984789 2.591261 -2.868844 2.704786 -2.815411 -0.08984447 -3.273705 -0.02539959 -3.166248 0.08170717 -3.230904 2.776027 -3.04373 2.720817 -2.929669 2.832964 -2.873517 -2.192635 -2.385275 -2.072039 -2.352265 -2.039149 -2.473298 2.984098 -3.030721 2.923486 -2.920052 3.032807 -2.858693 -2.869423 -1.44243 -2.745748 -1.460691 -2.763897 -1.58513 3.318028 -2.868262 3.247814 -2.764039 3.35123 -2.693278 -3.017787 -1.033041 -2.899706 -1.074445 -2.940716 -1.193662 3.866118 -2.33227 3.77861 -2.242641 3.86787 -2.154771 -3.061496 -0.8477733 -2.948856 -0.9026606 -3.003052 -1.016737 4.585533 -0.7014367 4.470218 -0.6531012 4.518461 -0.5375671 -3.07552 -0.7809894 -2.967417 -0.8446627 -3.030175 -0.9543408 4.007324 2.561848 3.889125 2.521121 3.848455 2.639485 -3.18685 0.02891609 -3.10395 -0.06537564 -3.197505 -0.1489285 3.045893 3.805499 2.947484 3.728304 2.870409 3.826867 -0.7740052 3.097331 -0.8498402 2.997354 -0.9492085 3.073653 2.65165 4.232258 2.563262 4.143715 2.474874 4.232258 0.1330927 3.170916 0.02846503 3.101005 -0.03993127 3.207949 0.5692671 3.195042 0.4554668 3.141499 0.4037514 
3.25932 0.6791817 3.273477 0.563039 3.225429 0.5168235 3.346178 0.5776642 3.423979 0.4622693 3.374395 0.4142169 3.493469 0.1322887 3.6233 0.02283824 3.562056 -0.0375405 3.673075 -2.180818 3.06052 -2.232618 2.946517 -2.34638 2.998426 -2.814937 -2.593343 -2.695892 -2.631781 -2.734012 -2.751818 -2.193263 -3.152294 -2.068808 -3.164346 -2.080466 -3.293002 -1.940586 -3.317336 -1.815606 -3.319755 -1.817879 -3.452812 -1.750737 -3.465896 -1.625804 -3.461436 -1.621696 -3.59705 -1.538474 -3.678093 -1.413927 -3.666536 -1.403291 -3.801862 -1.220139 -3.976369 -1.096706 -3.955304 -1.076976 -4.08709 -0.5969192 -4.338161 -0.477301 -4.300481 -0.4410176 -4.424703 2.31189 4.56253 2.23239 4.465886 2.135929 4.545538 2.65165 4.408201 2.563262 4.319638 2.474874 4.408201 2.912183 4.280008 2.817073 4.198737 2.735961 4.294035 3.112002 4.18141 3.011788 4.106555 2.937075 4.206959 3.265355 4.112049 3.16124 4.042758 3.092066 4.14705 3.382363 4.070196 3.275252 4.005667 3.210815 4.112933 3.469373 4.054317 3.359967 3.993791 3.299508 4.103319 3.528587 4.064602 3.417471 4.007304 3.360216 4.118505 3.55504 4.106228 3.44279 4.051205 3.387791 4.163505 3.520619 4.202463 3.408101 4.148003 3.353651 4.260542 3.218848 4.503128 3.109559 4.442456 3.048889 4.551749 -2.5 5 -2.5 4.875 -2.625 4.875 -2.625 5 4.875 2.748 4.75 2.748 4.75 2.873091 5.405715 1.260262 5.286826 1.299103 5.325432 1.418718 5.507221 0.2475924 5.399405 0.3116951 5.462657 0.4209618 5.471617 0.282076 5.365466 0.3491206 5.431472 0.4569411 5.31904 1.186018 5.204864 1.237521 5.255743 1.353096 4.180307 3.447744 4.055333 3.445176 4.052785 3.571109 -0.3681558 5.343839 -0.4423541 5.241854 -0.5429504 5.317077 -2.096007 4.798402 -2.137398 4.675252 -2.255346 4.718468 -2.248044 4.563703 -2.28961 4.436077 -2.407497 4.481077 -2.371601 4.410927 -2.414041 4.281049 -2.531616 4.32793 -2.677255 4.253136 -2.716054 4.121808 -2.83488 4.164689 -2.932763 4.158852 -2.969012 4.028217 -3.088641 4.067801 -3.250919 4.043705 -3.282737 3.914326 -3.40362 3.94838 -3.368118 3.975767 -3.397467 3.84952 -3.518973 3.880015 -3.63923 3.741689 -3.66105 3.616788 -3.784131 3.638931 -4.644057 2.329633 -4.625723 2.205723 -4.749371 2.187351 -0.3137541 -5.122746 -0.2061297 -5.059086 -0.1425531 -5.166852 0.3222625 -5.046122 0.4226729 -4.971392 0.4971226 -5.072181 -0.07666249 -4.981102 0.03068282 -4.916675 0.09472955 -5.024658 -0.4684898 -4.891661 -0.3552137 -4.83845 -0.3023598 -4.952491 0.1097005 -4.843903 0.2174167 -4.779949 0.2808376 -4.888571 0.769826 -4.703161 0.8695119 -4.626711 0.9449288 -4.727762 0.8882294 -4.608113 0.9876122 -4.530966 1.063428 -4.632094 0.9202012 -4.544347 1.020685 -4.468683 1.095036 -4.57094 1.069523 -4.471509 1.169199 -4.395013 1.24463 -4.496095 2.653058 -2.288663 2.544575 -2.352351 2.590957 -2.177408 2.655029 -2.179322 2.545329 -2.240493 2.595105 -2.067339 2.61099 -2.503009 2.500046 -2.562152 2.553401 -2.389071 2.599833 -2.154256 2.487338 -2.209514 2.545338 -2.040184 2.592693 -2.688788 2.480195 -2.744734 2.538203 -2.573283 2.408053 -2.295037 2.290092 -2.336689 2.366698 -2.176227 2.5998 -2.851476 2.486428 -2.905427 2.547151 -2.735298 1.641427 -2.844402 1.516429 -2.84512 1.640712 -2.719104 2.637023 -2.990465 2.523387 -3.043647 2.584949 -2.874411 -1.594472 -2.829819 -1.653624 -2.719577 -1.484354 -2.7706 2.714715 -3.097723 2.601505 -3.151605 2.661721 -2.982616 -2.937231 -1.312095 -2.913133 -1.18901 -2.814576 -1.336277 2.854665 -3.151763 2.742926 -3.208455 2.798634 -3.038705 -3.075103 -0.862428 -3.027263 -0.7461282 -2.95962 -0.9106066 3.105294 -3.09614 2.997162 -3.159297 3.042585 -2.987237 
-3.09304 -0.7349974 -3.034585 -0.6232951 -2.98255 -0.7940934 3.583489 -2.745964 3.485085 -2.823308 3.506408 -2.647224 -3.084563 -0.7474682 -3.020585 -0.6385766 -2.977178 -0.8123442 4.469125 -1.243542 4.407445 -1.352391 4.360403 -1.18179 -3.067111 -0.8334749 -3.000093 -0.7263462 -2.961596 -0.9015174 3.694197 2.99673 3.747858 2.883744 3.581301 2.943026 -3.19055 -0.2528189 -3.099748 -0.166282 -3.104644 -0.3442868 2.563262 4.146632 2.65165 4.058138 2.474874 4.058138 -1.327805 2.92521 -1.219722 2.861855 -1.3906 2.816163 2.089115 4.537083 2.188051 4.460578 2.012717 4.438009 -0.674595 3.117617 -0.5871133 3.025627 -0.7638808 3.027486 -0.3673859 3.233582 -0.2911259 3.129945 -0.4664284 3.153784 -0.403132 3.324377 -0.3280413 3.219128 -0.5030639 3.245291 -0.7058473 3.409269 -0.6244761 3.30994 -0.8007353 3.324089 -1.452595 3.347978 -1.354803 3.267931 -1.530452 3.247435 -3.066385 2.249302 -2.942866 2.22982 -3.085572 2.123888 -3.82024 -0.7949218 -3.723198 -0.7146426 -3.741451 -0.8937994 -3.113576 -2.407723 -3.060947 -2.289746 -3.000196 -2.462485 -2.547964 -3.013402 -2.519873 -2.883325 -2.426161 -3.043401 -2.122518 -3.367873 -2.110228 -3.232755 -1.998124 -3.381223 -1.714585 -3.69429 -1.71569 -3.558552 -1.58959 -3.69309 -1.192833 -4.060566 -1.209257 -3.928326 -1.068917 -4.043039 -0.2921196 -4.432978 -0.3322709 -4.309952 -0.1737437 -4.39125 1.702828 4.823751 1.808734 4.757247 1.636429 4.717678 2.181247 4.659623 2.278288 4.580703 2.102456 4.562423 2.563262 4.4983 2.65165 4.409765 2.474874 4.409765 2.865998 4.354241 2.946348 4.25833 2.770243 4.27376 3.107021 4.233331 3.180087 4.131754 3.005599 4.160154 3.301653 4.136131 3.36818 4.030163 3.195827 4.069514 3.462642 4.060082 3.523282 3.950656 3.353336 3.999376 3.601088 4.000377 3.656338 3.888157 3.488961 3.945081 3.728671 3.949006 3.778784 3.83443 3.614156 3.898866 3.864102 3.888742 3.908822 3.771983 3.747376 3.844009 4.065541 3.756231 4.102777 3.636894 3.946216 3.718992 4.840319 2.796643 4.847849 2.67187 4.715546 2.789113 -2.5 5 -2.75 5 5.063476 2.526877 5.054766 2.402101 4.938779 2.535592 5.308232 1.829795 5.279776 1.707261 5.186514 1.858443 5.386532 1.44843 5.346152 1.328284 5.268233 1.489441 5.173256 1.987681 5.141787 1.864286 5.052282 2.01978 4.503098 3.160844 4.498231 3.033473 4.378193 3.165807 2.751711 4.695746 2.795571 4.576195 2.634658 4.65095 -0.1289578 5.321267 -0.03315037 5.237816 -0.2092444 5.221683 -1.655254 4.918609 -1.542508 4.860654 -1.709228 4.797546 -2.046626 4.634899 -1.931623 4.580575 -2.095609 4.507357 -2.27793 4.470768 -2.162279 4.417377 -2.325361 4.340583 -2.6152 4.326091 -2.497928 4.277541 -2.658469 4.194504 -2.880907 4.247457 -2.762701 4.202515 -2.921554 4.116762 -3.181795 4.164944 -3.062381 4.12498 -3.218746 4.035792 -3.183168 4.188702 -3.064056 4.148893 -3.221081 4.063635 -3.208956 4.191089 -3.089952 4.151955 -3.247204 4.069329 -3.367641 4.059364 -3.247576 4.024384 -3.402417 3.938597 -5.06403 -1.329257 -4.980386 -1.236359 -4.97114 -1.412908 -0.1341611 -5.167072 -0.1979126 -5.059408 -0.02664018 -5.103236 -0.5650407 -5.0677 -0.6170865 -4.953716 -0.4513911 -5.015501 -0.6967325 -4.980439 -0.7431476 -4.863786 -0.5806694 -4.933788 0.07764793 -4.958989 0.01630516 -4.849281 0.186561 -4.897198 0.7052411 -4.842382 0.6327279 -4.739606 0.8070588 -4.769186 0.8695338 -4.743115 0.7956992 -4.640902 0.9703973 -4.668292 0.9836812 -4.650887 0.9095624 -4.548508 1.084336 -4.575499 1.225123 -4.535806 1.14797 -4.435645 1.323472 -4.457232 1.66259 -4.353344 1.578201 -4.25961 1.754805 -4.267565 2.604512 -2.290506 2.539722 -2.180911 2.646621 -2.114486 2.580053 
[raw floating-point vertex/coordinate data from a data or asset file included in this diff; values omitted]
-1.953672 1.996623 -2.127948 1.966979 -1.767767 2.472155 -1.679379 2.383717 -1.856155 2.383717 -1.514217 2.775834 -1.437818 2.676832 -1.613153 2.699385 -1.302765 3.015664 -1.236365 2.909677 -1.408671 2.949214 -1.128799 3.216021 -1.070603 3.105305 -1.239425 3.157778 -0.9848601 3.391938 -0.9334121 3.277923 -1.098782 3.340448 -0.8638557 3.552474 -0.8180268 3.436089 -0.9801515 3.50661 -0.7596802 3.703145 -0.7186159 3.585002 -0.8777426 3.662052 0.5914484 3.838856 0.588494 3.713131 0.4664834 3.841829 0.7867943 3.823391 0.7777914 3.69416 0.662119 3.832723 0.8880429 3.804565 0.8762161 3.670706 0.7636037 3.817288 1.001601 3.836714 0.9867442 3.700014 0.8774875 3.853078 1.166023 3.942675 1.146824 3.806801 1.042506 3.963794 1.441422 4.089967 1.414957 3.958914 1.319256 4.118357 2.006448 4.125183 1.964967 4.003084 1.888531 4.168135 3.758173 2.960405 3.668433 2.872678 3.671157 3.050878 3.093592 -3.736151 3.005204 -3.647063 3.181981 -3.647063 1.067038 -4.681548 1.028471 -4.558497 1.185939 -4.641636 5.807081 2.642595 5.777388 2.513013 5.68566 2.674285 5.242491 3.573476 5.230696 3.441925 5.118049 3.585945 4.237259 4.679444 4.251438 4.549664 4.113066 4.664628 2.436788 5.796455 2.487802 5.678275 2.322672 5.743623 -0.5822055 6.23271 -0.4861016 6.149824 -0.662137 6.133053 -3.072263 5.442887 -2.953205 5.402869 -3.110345 5.317777 -4.459199 4.391707 -4.334471 4.382854 -4.467447 4.257832 -5.192476 3.535118 -5.067885 3.546202 -5.182372 3.39844 -5.605342 2.905457 -5.482149 2.929052 -5.584167 2.768186 2.489013 -1.874291 2.389452 -1.7981 2.465035 -1.697738 2.66833 -1.35714 2.559728 -1.294517 2.621621 -1.184633 2.747923 -0.860568 2.632652 -0.8115209 2.681002 -0.6945879 2.690413 -0.6572608 2.574503 -0.6097171 2.621298 -0.4919525 2.575451 -0.6825524 2.46334 -0.6263925 2.518624 -0.5125053 2.48207 -0.6623961 2.373417 -0.5998058 2.43522 -0.4897681 2.397679 -0.6482524 2.293115 -0.5792208 2.361608 -0.4738349 2.249133 -0.858401 2.157059 -0.773557 2.2416 -0.6811531 1.575564 -1.730132 1.539157 -1.610417 1.658738 -1.573969 -0.8512747 -2.118642 -0.7539484 -2.040115 -0.6755099 -2.137551 -1.66829 -1.491049 -1.544679 -1.472392 -1.526093 -1.596474 -1.780193 -1.297799 -1.655224 -1.300608 -1.658008 -1.42668 -1.809897 -1.219755 -1.686005 -1.236622 -1.702614 -1.362438 -1.786163 -1.244069 -1.663527 -1.26879 -1.687722 -1.394093 -1.730127 -1.337136 -1.608265 -1.36568 -1.636097 -1.490658 -1.648092 -1.477823 -1.526449 -1.507343 -1.555227 -1.632121 -1.532271 -1.659134 -1.41025 -1.686868 -1.437381 -1.8116 -1.355559 -1.883287 -1.232478 -1.905453 -1.254297 -2.030492 -1.033814 -2.161337 -0.9091347 -2.170368 -0.9180824 -2.296219 -1.056965 -2.245446 -0.933059 -2.262033 -0.9495605 -2.386583 -1.447214 -2.13014 -1.32985 -2.173242 -1.37287 -2.290831 -2.611569 -0.5668836 -2.562702 -0.6819727 -2.677755 -0.7308544 -1.940734 2.009051 -2.013713 1.907515 -2.115197 1.980532 -1.679379 2.383508 -1.767767 2.295064 -1.856155 2.383508 -1.456159 2.666631 -1.554568 2.589498 -1.631643 2.687981 -1.27473 2.892748 -1.37975 2.824898 -1.447542 2.930007 -1.129434 3.084019 -1.238935 3.023677 -1.299221 3.133279 -1.013292 3.253708 -1.125923 3.199442 -1.180138 3.312178 -0.9204404 3.40966 -1.035315 3.360333 -1.084598 3.47531 -0.8466314 3.556534 -0.9631477 3.511233 -1.008413 3.627841 0.1368932 3.749579 0.01247309 3.737459 0.0004468514 3.862858 0.5349618 3.71826 0.4099669 3.719435 0.4110954 3.84955 0.6897638 3.684316 0.5649053 3.690751 0.5708532 3.825836 0.822667 3.712153 0.6980537 3.723039 0.7078783 3.861113 0.990999 3.825563 0.8668476 3.84164 0.8813886 3.978908 1.266767 3.992153 
1.143742 4.015972 1.165875 4.148364 1.831525 4.059144 1.712324 4.098153 1.749955 4.221719 3.578551 2.983446 3.488811 3.071172 3.575827 3.161645 3.005204 -3.644121 3.093592 -3.554988 3.181981 -3.644121 1.106104 -4.52934 1.224364 -4.487343 1.264855 -4.610002 5.844286 2.576541 5.723292 2.609826 5.754683 2.73812 5.287408 3.538059 5.163122 3.552 5.176463 3.681869 4.114983 4.826234 3.99119 4.80838 3.973862 4.935929 1.597918 6.125955 1.491975 6.058207 1.425634 6.166396 -2.176782 5.928352 -2.229606 5.812033 -2.342896 5.866269 -4.244132 4.665434 -4.254554 4.535072 -4.379119 4.545979 -5.125777 3.677442 -5.114116 3.543821 -5.23857 3.531301 -5.54472 3.029372 -5.521955 2.894226 -5.644864 2.869194 2.470945 -1.689355 2.395707 -1.789975 2.371124 -1.613514 2.68977 -1.026753 2.634558 -1.140141 2.577625 -0.9709288 2.675905 -0.7129913 2.626749 -0.8295876 2.560977 -0.6631219 2.585778 -0.6434876 2.532171 -0.7583114 2.472857 -0.5889769 2.518249 -0.5141948 2.462888 -0.6280446 2.406177 -0.4579561 2.46141 -0.3505197 2.406012 -0.4639168 2.349356 -0.2944573 2.394383 -0.28402 2.334569 -0.3945637 2.284622 -0.2237801 2.306608 -0.4250318 2.233057 -0.5264026 2.205537 -0.3512625 1.861078 -1.334814 1.747642 -1.387352 1.808568 -1.221316 -1.354743 -1.791251 -1.396171 -1.673225 -1.236808 -1.74979 -1.796092 -1.292217 -1.791808 -1.166823 -1.671165 -1.296516 -1.861045 -1.159556 -1.839312 -1.035339 -1.737948 -1.181486 -1.827766 -1.197537 -1.799582 -1.073798 -1.705984 -1.226174 -1.753612 -1.314581 -1.723719 -1.190514 -1.632239 -1.345138 -1.648905 -1.477181 -1.620063 -1.352433 -1.527278 -1.506764 -1.506931 -1.674902 -1.481602 -1.549378 -1.384524 -1.700876 -1.306126 -1.905188 -1.287459 -1.778911 -1.182527 -1.92426 -0.9989411 -2.164348 -0.9919801 -2.037611 -0.8741351 -2.171417 -0.9788117 -2.27135 -0.9665795 -2.145776 -0.8544116 -2.283697 -1.127325 -2.308382 -1.101536 -2.185419 -1.005014 -2.334308 -1.435954 -2.251866 -1.389694 -2.135513 -1.319829 -2.298216 -2.620162 -0.9161622 -2.508808 -0.8593571 -2.56337 -1.027542 -1.767767 2.296115 -1.679379 2.207696 -1.856155 2.207696 -1.429892 2.661299 -1.357569 2.559298 -1.531845 2.588942 -1.172412 2.917826 -1.11236 2.808129 -1.282042 2.857737 -0.9779956 3.118436 -0.9272017 3.004139 -1.09221 3.067605 -0.8284816 3.289352 -0.7847845 3.172145 -0.9455951 3.24562 -0.7104129 3.443739 -0.6722864 3.324596 -0.8294565 3.405581 -0.6144049 3.588502 -0.5807646 3.468016 -0.7347932 3.554835 -0.5337742 3.727443 -0.5038453 3.605991 -0.6551383 3.697493 -0.2351604 3.85534 -0.2156265 3.730889 -0.3586247 3.83565 -0.07853704 3.867228 -0.06443878 3.737873 -0.2027394 3.852545 -0.0922927 3.858628 -0.07820472 3.724147 -0.2164963 3.843374 -0.1637877 3.908381 -0.148029 3.770858 -0.2877904 3.890904 -0.2991023 4.047227 -0.2799685 3.910524 -0.4226292 4.026053 -0.5582433 4.255672 -0.5325424 4.123926 -0.6805726 4.227993 -1.143382 4.412384 -1.102891 4.289726 -1.261643 4.370387 -3.093592 3.632338 -3.005204 3.543205 -3.181981 3.543205 -3.662843 -3.175914 -3.575827 -3.085442 -3.573103 -3.263641 -1.862848 -4.423309 -1.825217 -4.299743 -1.743647 -4.462318 5.942602 2.566742 5.90818 2.439524 5.822435 2.603183 5.328461 3.630706 5.314186 3.501011 5.204279 3.645615 4.069286 4.97494 4.087316 4.847438 3.945593 4.956355 1.464325 6.24189 1.531798 6.134212 1.3591 6.172844 -2.187893 6.009449 -2.07551 5.953112 -2.242622 5.893766 -4.300942 4.730317 -4.176537 4.717547 -4.313126 4.599927 -5.259963 3.652961 -5.135539 3.665823 -5.24798 3.519411 -5.734077 2.897295 -5.611687 2.925163 -5.70867 2.763051 2.596711 -1.488408 2.488514 -1.425283 2.551113 
-1.316177 2.706417 -0.9617379 2.590814 -0.9136341 2.638363 -0.796684 2.643428 -0.7768058 2.527529 -0.7292923 2.574351 -0.6116836 2.509159 -0.82809 2.397782 -0.7704058 2.454528 -0.6571873 2.403822 -0.8166832 2.296422 -0.7517597 2.360376 -0.6427327 2.310691 -0.8009463 2.207727 -0.7292808 2.278604 -0.6251715 2.172071 -0.926445 2.079341 -0.8420708 2.163164 -0.7487305 1.819016 -1.384884 1.756613 -1.276238 1.864922 -1.213641 0.5827461 -2.143902 0.6075788 -2.021202 0.7300873 -2.046074 -1.00621 -1.920558 -0.8997876 -1.854812 -0.8342184 -1.961521 -1.474122 -1.532212 -1.351498 -1.507795 -1.327238 -1.631209 -1.594319 -1.361368 -1.469349 -1.35858 -1.466596 -1.485144 -1.617691 -1.310513 -1.49311 -1.320942 -1.503337 -1.447997 -1.578512 -1.358683 -1.454618 -1.3757 -1.471208 -1.502777 -1.502167 -1.468374 -1.378628 -1.487973 -1.397683 -1.615038 -1.389131 -1.623906 -1.265497 -1.642841 -1.283925 -1.769876 -1.223064 -1.819305 -1.098891 -1.833988 -1.113246 -1.960997 -0.9574958 -2.05251 -0.8325916 -2.05748 -0.8374837 -2.184376 -0.7956878 -2.210553 -0.6707138 -2.213127 -0.6732606 -2.339454 -0.9332135 -2.260462 -0.8093086 -2.27707 -0.8258185 -2.401709 -1.230229 -2.230684 -1.111041 -2.268437 -1.148715 -2.387875 -2.464137 -0.9801853 -2.402002 -1.088674 -2.510466 -1.150822 -1.679379 2.2076 -1.767767 2.119178 -1.856155 2.2076 -1.37407 2.550338 -1.475555 2.477321 -1.548534 2.578857 -1.145487 2.794596 -1.2544 2.733213 -1.315743 2.842197 -0.9757234 2.988504 -1.089102 2.935828 -1.141736 3.049295 -0.8478641 3.155604 -0.9640844 3.109543 -1.010105 3.225866 -0.7499137 3.307657 -0.8680334 3.266721 -0.9089324 3.384949 -0.6739243 3.450817 -0.7933643 3.413918 -0.8302302 3.533465 -0.6148008 3.588437 -0.7351853 3.554756 -0.7688389 3.675238 0.0751747 3.741709 -0.0494283 3.731689 -0.05938278 3.857111 0.09694597 3.751152 -0.02773404 3.741877 -0.03667255 3.871247 0.04541217 3.744 -0.07916604 3.73295 -0.08942612 3.867119 -0.04247979 3.793928 -0.1668438 3.780043 -0.1794369 3.917164 -0.186389 3.933938 -0.3103063 3.915866 -0.3267222 4.052283 -0.4487519 4.146526 -0.5715522 4.121479 -0.5948994 4.253221 -1.031675 4.313821 -1.150577 4.273909 -1.189143 4.396959 -3.005204 3.543838 -3.093592 3.45475 -3.181981 3.543838 -3.671157 -2.974674 -3.584141 -3.065147 -3.673881 -3.152874 -1.971492 -4.246213 -1.853575 -4.289165 -1.895056 -4.411264 6.018812 2.406488 5.899751 2.446395 5.937823 2.571196 5.413821 3.534384 5.290012 3.552127 5.307225 3.679742 3.813276 5.206862 3.69064 5.182249 3.666443 5.306996 -0.2088896 6.444063 -0.2931131 6.350542 -0.3854787 6.435819 -3.959435 5.076853 -3.976693 4.950046 -4.100496 4.967723 -5.236149 3.739975 -5.221298 3.609862 -5.345413 3.594293 -5.720782 2.952009 -5.692073 2.821032 -5.813731 2.790124 2.610559 -1.197101 2.553086 -1.308992 2.499555 -1.139169 2.605839 -0.8937425 2.553974 -1.008844 2.492107 -0.8412532 2.50293 -0.8464044 2.445385 -0.9591551 2.391964 -0.7879337 2.431817 -0.7314664 2.371623 -0.8428899 2.322265 -0.6702437 2.375191 -0.5902292 2.313696 -0.7006622 2.266364 -0.5278266 2.305308 -0.5266062 2.239022 -0.6336973 2.19933 -0.4596242 2.20527 -0.6217369 2.12708 -0.7198433 2.107744 -0.543081 1.979564 -1.023599 1.878081 -1.096746 1.906582 -0.9218854 0.562379 -2.102844 0.4422576 -2.068235 0.59696 -1.982626 -1.234656 -1.743143 -1.276229 -1.624999 -1.116772 -1.701479 -1.580707 -1.394938 -1.58466 -1.269225 -1.455769 -1.390961 -1.644816 -1.294217 -1.6321 -1.168263 -1.520465 -1.307097 -1.60319 -1.342725 -1.58444 -1.216647 -1.479604 -1.361853 -1.514079 -1.462067 -1.494064 -1.335484 -1.390691 -1.4826 -1.384768 -1.625519 
-1.366659 -1.498316 -1.261086 -1.644144 -1.204656 -1.822145 -1.191519 -1.694454 -1.080349 -1.83564 -0.9443173 -2.044342 -0.940212 -1.91659 -0.8193847 -2.04854 -0.7876442 -2.20231 -0.785547 -2.075337 -0.6626618 -2.20444 -0.8528771 -2.281112 -0.8407279 -2.155345 -0.728469 -2.293394 -0.9368213 -2.36087 -0.9145814 -2.237124 -0.8138157 -2.383244 -1.112932 -2.404744 -1.07704 -2.284759 -0.9931954 -2.440711 -2.218432 -1.644831 -2.125159 -1.561603 -2.135214 -1.738116 -1.257064 2.595961 -1.193071 2.488556 -1.364441 2.531952 -0.9379118 2.858192 -0.8892377 2.743013 -1.053046 2.809499 -0.7321026 3.045295 -0.6933121 2.9264 -0.8509314 3.006483 -0.5908758 3.202059 -0.5588552 3.081146 -0.7117049 3.170016 -0.4883708 3.344932 -0.4612444 3.222813 -0.6103919 3.317784 -0.4103639 3.480829 -0.3869377 3.357939 -0.5331491 3.457383 -0.3484901 3.61299 -0.3279695 3.489582 -0.4717942 3.592452 -0.2974304 3.743053 -0.2792711 3.619284 -0.4211043 3.724879 -0.922076 3.744982 -0.8846373 3.624865 -1.041338 3.707275 -0.8384158 3.776467 -0.8046925 3.651251 -0.9587808 3.741384 -0.9640235 3.741431 -0.9275643 3.611905 -1.083588 3.701935 -1.19166 3.731498 -1.149686 3.600354 -1.309402 3.684746 -1.551045 3.754882 -1.500204 3.627233 -1.665239 3.698051 -2.135122 3.7233 -2.069889 3.606486 -2.241751 3.651836 -3.093592 3.338529 -3.005204 3.244634 -3.181981 3.244634 -4.412908 1.78806 -4.294648 1.746063 -4.453399 1.665402 -4.681715 -1.262627 -4.563798 -1.219675 -4.640234 -1.384726 -3.571518 -3.236663 -3.486827 -3.13942 -3.479581 -3.326242 6.117595 2.388268 6.076584 2.26463 5.999514 2.431208 5.458568 3.62259 5.440418 3.495155 5.334892 3.641291 3.761134 5.350314 3.785971 5.225664 3.638626 5.325042 -0.2863074 6.525528 -0.1941347 6.439925 -0.370742 6.43208 -4.001448 5.154341 -3.877935 5.134631 -4.020671 5.027701 -5.368077 3.714077 -5.243993 3.729914 -5.352968 3.584015 -5.907233 2.802951 -5.78624 2.836693 -5.875835 2.672927 2.658407 -1.083024 2.542824 -1.035021 2.590422 -0.9184537 2.601168 -0.8836414 2.485035 -0.836823 2.531275 -0.7192362 2.441878 -0.9677139 2.331116 -0.9088466 2.389053 -0.7963075 2.323347 -0.9626278 2.217044 -0.8957719 2.282806 -0.7877012 2.222905 -0.9395153 2.121213 -0.8658023 2.193902 -0.7626783 2.086987 -1.015173 1.994359 -0.9303989 2.078293 -0.8368434 1.835072 -1.280019 1.76323 -1.177107 1.865522 -1.104829 1.229513 -1.781808 1.206414 -1.658562 1.329261 -1.635388 -0.06516222 -2.102375 -0.002984235 -1.993639 0.1054542 -2.055988 -1.002904 -1.79187 -0.8929938 -1.732033 -0.8334561 -1.842497 -1.310127 -1.531044 -1.188191 -1.503259 -1.160686 -1.626436 -1.424296 -1.392107 -1.299475 -1.385305 -1.292789 -1.512286 -1.423173 -1.381259 -1.29824 -1.385434 -1.302315 -1.513429 -1.364863 -1.450447 -1.240194 -1.459807 -1.249284 -1.588181 -1.264404 -1.573591 -1.139824 -1.584139 -1.15005 -1.712639 -1.116182 -1.737867 -0.9914303 -1.745975 -0.9993083 -1.87437 -0.8951047 -1.934922 -0.7701111 -1.936211 -0.7713704 -2.06415 -0.6630335 -2.111967 -0.5381367 -2.106797 -0.5330575 -2.233929 -0.7212407 -2.194987 -0.5963565 -2.200431 -0.6017354 -2.326813 -0.815653 -2.273841 -0.6917944 -2.290802 -0.708648 -2.415455 -1.026801 -2.307615 -0.9063092 -2.340953 -0.9395755 -2.461702 -2.301797 -1.287023 -2.230745 -1.389879 -2.333588 -1.46094 -1.207887 2.481348 -1.314882 2.4167 -1.379512 2.523723 -0.9161073 2.73408 -1.030759 2.684259 -1.080559 2.798958 -0.7295562 2.917471 -0.8478954 2.877188 -0.8881552 2.995596 -0.6030943 3.072661 -0.7234512 3.038884 -0.7572035 3.159329 -0.5130241 3.214821 -0.6345937 3.185713 -0.6636769 3.307385 -0.4464793 3.350368 -0.5688299 
3.324746 -0.5944292 3.447206 -0.396117 3.482322 -0.518997 3.459377 -0.5419211 3.582366 -0.3576211 3.612201 -0.480873 3.591353 -0.5017048 3.714705 -0.003498156 3.743692 -0.1282135 3.735231 -0.1366445 3.86039 -0.339343 3.763285 -0.4629031 3.743841 -0.4818213 3.870836 -0.6089263 3.735236 -0.7310498 3.706875 -0.7577115 3.836783 -0.9092987 3.72838 -1.029348 3.690382 -1.064178 3.821351 -1.325905 3.748111 -1.442197 3.697861 -1.488035 3.825347 -1.973718 3.699696 -2.081927 3.632163 -2.144503 3.748945 -3.005204 3.266382 -3.093592 3.173299 -3.181981 3.266382 -4.322237 1.680011 -4.360803 1.55696 -4.479705 1.596872 -4.604904 -1.073511 -4.567272 -1.197077 -4.686473 -1.236086 -3.754926 -2.858548 -3.670235 -2.955791 -3.762172 -3.04537 6.245887 2.04977 6.130404 2.099274 6.178245 2.218774 5.625362 3.381338 5.502695 3.405796 5.526734 3.530602 3.095537 5.780692 2.976867 5.741141 2.937592 5.860644 -3.19144 5.724978 -3.225465 5.603718 -3.345745 5.63802 -5.436002 3.656871 -5.415086 3.5305 -5.538323 3.509053 -5.969141 2.701572 -5.931369 2.576203 -6.050525 2.53646 2.528445 -1.073512 2.473851 -1.186971 2.415997 -1.018427 2.408167 -1.048015 2.346652 -1.158381 2.299351 -0.9856236 2.333439 -0.9383327 2.26877 -1.047143 2.226468 -0.872552 2.276686 -0.8039576 2.210144 -0.9115501 2.170869 -0.7362991 2.204295 -0.7344129 2.132958 -0.8384767 2.10165 -0.6620894 2.100738 -0.7827285 2.019299 -0.8784634 2.005908 -0.7005116 1.932486 -0.9898812 1.834865 -1.068353 1.854415 -0.8917587 1.397111 -1.583595 1.275332 -1.611848 1.36892 -1.46155 -0.1772645 -2.055975 -0.2762699 -1.979528 -0.1009564 -1.956789 -1.111755 -1.695202 -1.153671 -1.576922 -0.9939919 -1.653102 -1.397893 -1.436953 -1.406827 -1.31106 -1.273213 -1.427933 -1.43229 -1.388471 -1.427429 -1.261407 -1.307385 -1.393416 -1.375955 -1.451123 -1.365958 -1.323454 -1.251355 -1.461366 -1.268246 -1.574081 -1.257729 -1.445816 -1.143689 -1.584911 -1.112116 -1.735951 -1.104518 -1.607269 -0.987347 -1.743787 -0.893075 -1.924267 -0.8919431 -1.795637 -0.7680802 -1.925432 -0.67443 -2.096179 -0.6788477 -1.96831 -0.549508 -2.091657 -0.7037899 -2.187844 -0.6993912 -2.060687 -0.5788673 -2.192322 -0.7303998 -2.290316 -0.7181468 -2.164414 -0.6060018 -2.302717 -0.7529316 -2.402097 -0.7338145 -2.277772 -0.6294022 -2.421337 -0.7876585 -2.514387 -0.7617982 -2.391838 -0.6653628 -2.5403 -1.215905 -2.470145 -1.165843 -2.355602 -1.101368 -2.520209 -0.4338869 2.84327 -0.4096903 2.72061 -0.5565226 2.819069 -0.2920811 2.986085 -0.274753 2.862248 -0.4158742 2.968751 -0.2115146 3.117361 -0.1980829 2.99302 -0.3357908 3.103923 -0.1595058 3.244927 -0.1485817 3.12032 -0.2840276 3.233995 -0.1230011 3.371113 -0.1138284 3.246351 -0.2476641 3.361933 -0.09577227 3.496807 -0.08789642 3.371948 -0.2205239 3.488925 -0.0744469 3.622391 -0.06757594 3.497472 -0.1992579 3.615514 -0.0569936 3.748025 -0.05093205 3.623073 -0.1818465 3.741958 -1.391087 3.603307 -1.342499 3.487656 -1.506257 3.554515 -1.459449 3.616373 -1.410251 3.497777 -1.57436 3.565596 -1.67185 3.536416 -1.617772 3.415327 -1.784547 3.478312 -1.993231 3.435517 -1.931412 3.314917 -2.101874 3.366893 -2.446026 3.306839 -2.373236 3.192579 -2.547646 3.224995 -3.075557 3.042997 -2.987656 2.943864 -3.16443 2.944949 -3.870851 2.415404 -3.76446 2.343587 -3.936471 2.298964 -4.621655 1.120214 -4.499353 1.092416 -4.647485 0.9885879 -4.790482 -0.7621111 -4.668329 -0.7336624 -4.763954 -0.8931091 -4.219895 -2.39295 -4.116141 -2.317373 -4.150182 -2.50543 6.343988 2.019661 6.293332 1.901485 6.229712 2.072045 5.676511 3.462613 5.651511 3.338025 5.554037 3.488045 3.029895 5.913514 
3.069701 5.794174 2.911402 5.873423 -3.210899 5.814782 -3.091232 5.778347 -3.24702 5.694075 -5.56374 3.628733 -5.440529 3.650337 -5.542671 3.502394 -6.148794 2.526938 -6.030553 2.569552 -6.108247 2.402672 2.566136 -0.9757477 2.449493 -0.9303768 2.494432 -0.8126132 2.373256 -1.104564 2.263053 -1.044719 2.322047 -0.9329248 2.239335 -1.106255 2.134117 -1.037625 2.201601 -0.9306194 2.131528 -1.07533 2.031092 -0.9997012 2.105508 -0.8976272 1.993828 -1.118274 1.901877 -1.032463 1.986552 -0.9392779 1.788404 -1.270969 1.712389 -1.17078 1.81162 -1.094031 1.421449 -1.563509 1.376322 -1.446232 1.492892 -1.400832 0.6479435 -1.937942 0.6626741 -1.813305 0.7868031 -1.828096 -0.3736936 -1.9498 -0.2926295 -1.854199 -0.197479 -1.935647 -0.9455999 -1.693023 -0.8339494 -1.636356 -0.777743 -1.748923 -1.185696 -1.492354 -1.063804 -1.464247 -1.036103 -1.587926 -1.253544 -1.412535 -1.128993 -1.401706 -1.118401 -1.529041 -1.227786 -1.433342 -1.102799 -1.431449 -1.100959 -1.560104 -1.148098 -1.519444 -1.023111 -1.52129 -1.024896 -1.650492 -1.021355 -1.651403 -0.8963644 -1.652986 -0.8978951 -1.78226 -0.8363762 -1.816778 -0.7114025 -1.814136 -0.7088398 -1.942966 -0.6031054 -1.987251 -0.478434 -1.977961 -0.4693764 -2.105825 -0.6065173 -2.085424 -0.481522 -2.084317 -0.4804362 -2.211746 -0.6457689 -2.185091 -0.5210353 -2.193353 -0.5291928 -2.319681 -0.6995858 -2.288095 -0.5757849 -2.305482 -0.5930577 -2.430098 -0.8230312 -2.371641 -0.7014399 -2.400692 -0.7304324 -2.522527 -2.144407 -1.519536 -2.06738 -1.617987 -2.165826 -1.695018 -0.4239189 2.718414 -0.5464264 2.693571 -0.571264 2.816104 -0.2909924 2.860619 -0.4146853 2.842582 -0.4327151 2.96632 -0.2161012 2.99174 -0.3402944 2.977553 -0.3544734 3.101813 -0.1683836 3.119272 -0.2928336 3.107551 -0.304547 3.232087 -0.13556 3.245465 -0.2601588 3.235451 -0.2701649 3.360151 -0.1118292 3.371183 -0.2365219 3.362415 -0.2452819 3.487217 -0.09413319 3.496798 -0.2188885 3.488974 -0.2267059 3.613839 -0.08075855 3.622471 -0.2055574 3.615377 -0.2126456 3.740277 -0.01816561 3.746409 -0.1430746 3.741634 -0.1478438 3.866703 -0.6716418 3.761257 -0.7939897 3.735262 -0.8196022 3.859436 -1.212978 3.657657 -1.330856 3.614212 -1.372445 3.737349 -1.687782 3.533701 -1.80025 3.475153 -1.854803 3.595856 -2.256295 3.359124 -2.360247 3.283582 -2.429666 3.396702 -2.984843 3.01255 -3.073793 2.917089 -3.161616 3.013776 -3.825459 2.282442 -3.888378 2.166025 -3.996388 2.23384 -4.521446 1.003898 -4.544646 0.8722696 -4.667474 0.8971312 -4.694425 -0.5894343 -4.671843 -0.7216566 -4.794786 -0.7459433 -4.357928 -1.914192 -4.298985 -2.03415 -4.409216 -2.098295 6.523957 1.319702 6.416017 1.384137 6.479054 1.49447 6.035921 2.790485 5.917165 2.829774 5.956177 2.949377 0.08838835 6.646013 -1.850372e-018 6.557496 -0.08838835 6.646013 -5.884971 3.09106 -5.848805 2.970367 -5.968459 2.933888 -6.324898 2.05746 -6.271796 1.940903 -6.384957 1.886208 2.296307 -1.256122 2.230393 -1.363561 2.190098 -1.189445 2.220475 -1.143251 2.151285 -1.249039 2.11637 -1.072943 2.165683 -1.006027 2.094485 -1.110598 2.062942 -0.9335612 2.091428 -0.9275143 2.015537 -1.028496 1.992102 -0.8503583 1.989743 -0.9330229 1.905368 -1.02648 1.897515 -0.8475232 1.850636 -1.03166 1.754165 -1.111851 1.771148 -0.9343369 1.558949 -1.333232 1.444643 -1.384063 1.508362 -1.218374 0.7404774 -1.852823 0.6167525 -1.834964 0.7582861 -1.728751 -0.4408948 -1.900592 -0.5252068 -1.80797 -0.34861 -1.815972 -1.02948 -1.621249 -1.068884 -1.501759 -0.9108532 -1.581558 -1.220149 -1.459849 -1.23351 -1.333812 -1.095865 -1.446299 -1.224388 -1.451577 -1.226507 -1.32382 
-1.099406 -1.44941 -1.149776 -1.527249 -1.14786 -1.398595 -1.02479 -1.529222 -1.02153 -1.652414 -1.019987 -1.523209 -0.89654 -1.65401 -0.8383995 -1.808838 -0.8408285 -1.679577 -0.7134231 -1.806326 -0.6167841 -1.971872 -0.6250085 -1.84328 -0.492055 -1.963393 -0.6088672 -2.071953 -0.6098151 -1.943773 -0.4838708 -2.070981 -0.6179151 -2.178535 -0.6113256 -2.051272 -0.4930889 -2.185253 -0.6077612 -2.300652 -0.5954141 -2.174664 -0.4833725 -2.313157 -0.567841 -2.436084 -0.5518462 -2.311297 -0.4438686 -2.452184 -0.4379142 -2.589344 -0.4229709 -2.465002 -0.3138107 -2.604316 6.617635 1.275768 6.552043 1.167029 6.511227 1.342796 6.100414 2.855863 6.06034 2.736625 5.982011 2.89622 0 6.734648 0.08838835 6.646126 -0.08838835 6.646126 -6.003153 3.057486 -5.883482 3.093907 -5.967046 2.936775 -6.485463 1.843358 -6.373687 1.900965 -6.429507 1.728284 2.302701 -1.240549 2.193048 -1.179823 2.253059 -1.068864 2.149838 -1.251442 2.045811 -1.181015 2.115118 -1.075308 2.032624 -1.21554 1.933686 -1.137807 2.01008 -1.037134 1.890973 -1.233239 1.800251 -1.145838 1.886242 -1.053628 1.713169 -1.309936 1.635425 -1.210734 1.733307 -1.131942 1.453625 -1.469268 1.39771 -1.356385 1.509506 -1.299926 0.986049 -1.726876 0.969893 -1.602097 1.093845 -1.585833 0.2175072 -1.909699 0.2606334 -1.791705 0.3779583 -1.835078 -0.5000969 -1.799504 -0.4089725 -1.713297 -0.3234076 -1.805105 -0.8833 -1.597481 -0.7700344 -1.543945 -0.7171581 -1.658624 -1.057314 -1.455274 -0.9355414 -1.426497 -0.9073224 -1.550678 -1.083165 -1.422565 -0.9589985 -1.407756 -0.9445892 -1.535367 -1.033318 -1.467823 -0.9085417 -1.460094 -0.9010692 -1.589147 -0.9313936 -1.566218 -0.8065022 -1.560811 -0.8012917 -1.690402 -0.7779761 -1.701713 -0.6531632 -1.694624 -0.6463264 -1.824048 -0.569521 -1.855445 -0.44508 -1.843246 -0.4332716 -1.971807 -0.5338053 -1.960374 -0.4089335 -1.954559 -0.4032739 -2.082871 -0.5535294 -2.064816 -0.4285693 -2.068039 -0.4317274 -2.195564 -0.5681798 -2.181195 -0.4436481 -2.192147 -0.4544584 -2.318312 -0.5810199 -2.304477 -0.4572584 -2.322142 -0.4748108 -2.446702 -0.60301 -2.427687 -0.480339 -2.451745 -0.5043559 -2.574628 6.720831 -0.2085107 6.631527 -0.1199647 6.718991 -0.02955564 6.718708 -0.0966808 6.629619 -0.008868662 6.717302 0.08035171 -6.717302 0.08035171 -6.629619 -0.008868662 -6.718708 -0.0966808 -6.712924 0.3810499 -6.631093 0.2852444 -6.725585 0.2022761 2.086903 -1.353296 2.012761 -1.455356 1.986265 -1.278106 2.038128 -1.206395 1.96217 -1.307429 1.938854 -1.129091 1.963752 -1.115871 1.88333 -1.213356 1.868058 -1.033943 1.868973 -1.081338 1.781585 -1.172225 1.779595 -0.9924741 1.75122 -1.106598 1.654545 -1.186867 1.671979 -1.008669 1.555546 -1.250134 1.445775 -1.310437 1.495751 -1.139429 1.123427 -1.571189 0.999793 -1.589711 1.105 -1.446919 0.2620993 -1.860955 0.1476521 -1.810476 0.3123669 -1.746027 -0.5256907 -1.763079 -0.6006524 -1.662427 -0.425662 -1.68765 -0.9371743 -1.553908 -0.9746365 -1.43331 -0.81792 -1.516024 -1.046625 -1.467688 -1.064009 -1.341534 -0.92284 -1.449971 -1.022217 -1.489352 -1.030573 -1.361201 -0.8974966 -1.480766 -0.9279512 -1.575069 -0.9334198 -1.445916 -0.8030709 -1.569413 -0.7789486 -1.699817 -0.7857177 -1.570299 -0.654132 -1.692793 -0.5799224 -1.844083 -0.5910619 -1.714958 -0.4554197 -1.83253 -0.5416644 -1.947189 -0.5468394 -1.818191 -0.4167715 -1.941844 -0.5461828 -2.051957 -0.5434592 -1.923548 -0.4212125 -2.054755 -0.5290363 -2.174841 -0.5204293 -2.047567 -0.404333 -2.183625 -0.4819299 -2.31283 -0.4696596 -2.186799 -0.3575337 -2.325262 -0.3740832 -2.464057 -0.3615978 -2.338888 -0.2497083 -2.476622 
6.802774 -0.2581162 6.71347 -0.3466622 6.71531 -0.1677071 6.804984 -0.1155787 6.715896 -0.2033908 6.717302 -0.02635832 -6.804984 -0.1155787 -6.717302 -0.02635832 -6.715896 -0.2033908 -6.810961 0.1152768 -6.718729 0.2008155 -6.726592 0.02176642 2.051864 -1.401315 1.949271 -1.328885 2.020682 -1.22483 1.920241 -1.365869 1.823331 -1.285527 1.902284 -1.186912 1.77573 -1.358884 1.686843 -1.269363 1.774731 -1.178825 1.61883 -1.377611 1.540844 -1.278252 1.638534 -1.198933 1.418887 -1.447085 1.357417 -1.336737 1.466258 -1.274416 1.109168 -1.590893 1.075335 -1.469322 1.195669 -1.435141 0.5992614 -1.765145 0.6098364 -1.639601 0.7343882 -1.65026 -0.04919449 -1.800449 0.01298055 -1.691133 0.1214207 -1.75381 -0.5393493 -1.665738 -0.4420713 -1.58637 -0.3635729 -1.684725 -0.8221803 -1.504153 -0.7072399 -1.454171 -0.6581105 -1.571106 -0.9231166 -1.420318 -0.8015808 -1.390353 -0.7723566 -1.51497 -0.9144752 -1.422385 -0.7907875 -1.403719 -0.7727226 -1.531524 -0.8417165 -1.485923 -0.717369 -1.472669 -0.7046137 -1.601877 -0.7179519 -1.592023 -0.5935084 -1.579756 -0.5817273 -1.709337 -0.5416347 -1.724593 -0.4174809 -1.709496 -0.4029604 -1.838572 -0.4827158 -1.829657 -0.3580674 -1.819957 -0.3486989 -1.949018 -0.4950623 -1.933036 -0.3700627 -1.932719 -0.3697542 -2.061322 -0.4972749 -2.052225 -0.3724842 -2.059606 -0.3797137 -2.18702 -0.4874533 -2.18336 -0.3631597 -2.196803 -0.3764301 -2.322709 -0.4554581 -2.32342 -0.3316831 -2.340982 -0.3491401 -2.465499 5.98383 -3.17462 5.943756 -3.055382 6.062159 -3.015026 -0.08838835 -6.768749 0 -6.680228 0.08838835 -6.768749 -6.062159 -3.015026 -5.943756 -3.055382 -5.98383 -3.17462 1.885744 -1.41284 1.804471 -1.50939 1.790772 -1.330217 1.816301 -1.304856 1.731068 -1.398097 1.724866 -1.21794 1.734807 -1.231257 1.644138 -1.319009 1.64876 -1.13879 1.637434 -1.200268 1.539677 -1.279503 1.559534 -1.100836 1.494196 -1.245653 1.38669 -1.310252 1.430418 -1.136765 1.232861 -1.409508 1.113423 -1.446702 1.195987 -1.289036 0.7141618 -1.665488 0.5897447 -1.653356 0.72622 -1.540308 -0.01105218 -1.764197 -0.1146314 -1.693777 0.05892167 -1.659957 -0.5598846 -1.63786 -0.6267541 -1.531257 -0.4542746 -1.570362 -0.8267477 -1.497726 -0.8635366 -1.376342 -0.707284 -1.460346 -0.8776778 -1.462661 -0.8987044 -1.336402 -0.754459 -1.441116 -0.8269756 -1.506131 -0.8408773 -1.377806 -0.702751 -1.49177 -0.7133115 -1.598474 -0.7254319 -1.469205 -0.5889005 -1.585881 -0.5464262 -1.719518 -0.5606207 -1.590179 -0.4222348 -1.704735 -0.490194 -1.819778 -0.4990758 -1.690192 -0.36551 -1.810547 -0.4955389 -1.91973 -0.4958176 -1.790329 -0.3705392 -1.919442 -0.4791445 -2.039841 -0.4729916 -1.911384 -0.354296 -2.046172 -0.4361178 -2.176742 -0.4257299 -2.049547 -0.3115502 -2.187349 -0.3497464 -2.326841 -0.337888 -2.200794 -0.2253102 -2.338853 6.073212 -3.187958 5.954455 -3.227248 6.0342 -3.068356 0 -6.858009 -0.08838835 -6.769492 0.08838835 -6.769492 -6.073212 -3.187958 -6.0342 -3.068356 -5.954455 -3.227248 1.78482 -1.531745 1.690882 -1.447911 1.773348 -1.352415 1.644173 -1.494514 1.557853 -1.402321 1.648262 -1.314297 1.507542 -1.464637 1.430438 -1.364273 1.528825 -1.28562 1.348087 -1.468123 1.283949 -1.358903 1.391239 -1.293612 1.127325 -1.524984 1.083375 -1.406261 1.200393 -1.361672 0.7879325 -1.631655 0.7760938 -1.505801 0.9005319 -1.493827 0.301969 -1.713591 0.3344511 -1.591686 0.455157 -1.624491 -0.1959119 -1.669623 -0.1211491 -1.568331 -0.02097167 -1.643926 -0.5463138 -1.541005 -0.4442078 -1.467771 -0.3721012 -1.571473 -0.7402139 -1.424453 -0.6242335 -1.376771 -0.5776123 -1.495391 -0.7864714 -1.3847 -0.6652397 
-1.353289 -0.6347784 -1.4783 -0.7488455 -1.412596 -0.6257049 -1.390284 -0.604225 -1.518197 -0.6549026 -1.489221 -0.5311498 -1.470839 -0.5135359 -1.599989 -0.5110332 -1.598646 -0.3873063 -1.580057 -0.3695118 -1.709305 -0.4439943 -1.698144 -0.3196409 -1.68491 -0.3069429 -1.814515 -0.4541516 -1.797191 -0.3291887 -1.794033 -0.3261421 -1.923596 -0.4526173 -1.914887 -0.3277121 -1.919899 -0.332581 -2.048482 -0.4378178 -2.047532 -0.3133121 -2.05887 -0.3244184 -2.185977 -0.4024266 -2.19142 -0.2783871 -2.207077 -0.293853 -2.33265 2.910477 -6.151046 2.950283 -6.031706 3.068776 -6.071797 -3.068776 -6.071797 -2.950283 -6.031706 -2.910477 -6.151046 1.641783 -1.497121 1.55124 -1.584996 1.555603 -1.404796 1.582904 -1.38406 1.488571 -1.467865 1.50089 -1.287667 1.508802 -1.306045 1.409286 -1.383265 1.433161 -1.204452 1.402991 -1.282781 1.296265 -1.348991 1.337919 -1.17419 1.229885 -1.340429 1.113814 -1.38743 1.18349 -1.22284 0.9156465 -1.491309 0.7913303 -1.504489 0.9025888 -1.365828 0.3972732 -1.646372 0.2777554 -1.609473 0.4338859 -1.525919 -0.1514273 -1.643893 -0.2455694 -1.560903 -0.06919426 -1.548884 -0.5507781 -1.527117 -0.6111787 -1.416187 -0.4413397 -1.465893 -0.7071937 -1.446221 -0.7439897 -1.324195 -0.5877322 -1.408635 -0.7139991 -1.446643 -0.7382692 -1.320284 -0.5913779 -1.421633 -0.6398051 -1.505624 -0.6585777 -1.377286 -0.5162228 -1.486129 -0.5082301 -1.601426 -0.5262267 -1.472329 -0.3845324 -1.582643 -0.4481598 -1.693244 -0.4605707 -1.563369 -0.3237775 -1.680286 -0.4570439 -1.786388 -0.4598989 -1.656194 -0.3320765 -1.783414 -0.444289 -1.901572 -0.4399443 -1.772045 -0.3193646 -1.906077 -0.4076794 -2.035425 -0.3983632 -1.907088 -0.283027 -2.045016 -0.3380093 -2.183949 -0.3261441 -2.056912 -0.2135737 -2.196063 3.057106 -6.178878 2.938436 -6.139326 3.096381 -6.059374 -3.057106 -6.178878 -3.096381 -6.059374 -2.938436 -6.139326 1.490629 -1.639418 1.407816 -1.543938 1.501448 -1.45949 1.378292 -1.565629 1.303089 -1.463569 1.402937 -1.3867 1.252487 -1.516839 1.187559 -1.407707 1.294373 -1.341369 1.089576 -1.507961 1.039876 -1.391079 1.154571 -1.340432 0.8556776 -1.545114 0.8294788 -1.420979 0.9517025 -1.39437 0.5157247 -1.603991 0.5240244 -1.477644 0.6487485 -1.486052 0.09712505 -1.613798 0.1464579 -1.497509 0.2613112 -1.547459 -0.2686478 -1.539114 -0.1850732 -1.444786 -0.09212004 -1.529597 -0.5307801 -1.426676 -0.424746 -1.35912 -0.358551 -1.467335 -0.644737 -1.353966 -0.5281128 -1.307697 -0.4831261 -1.427644 -0.6497726 -1.34715 -0.5288706 -1.314232 -0.4971262 -1.439605 -0.5875983 -1.394224 -0.4650378 -1.36857 -0.4404628 -1.496513 -0.4746473 -1.479686 -0.3515936 -1.456666 -0.3296208 -1.585582 -0.4127738 -1.568813 -0.2887934 -1.552126 -0.2728605 -1.681977 -0.4234209 -1.661244 -0.2985392 -1.655571 -0.293103 -1.785906 -0.4216633 -1.774757 -0.2967023 -1.777992 -0.2998209 -1.907641 -0.4069972 -1.905825 -0.2823867 -1.915976 -0.2922477 -2.044242 -0.3751356 -2.050499 -0.2510107 -2.065562 -0.2657761 -2.192188 -0.08838835 -6.821039 0 -6.731534 0.08838835 -6.821039 1.407782 -1.539413 1.309329 -1.618187 1.330763 -1.438716 1.363232 -1.419917 1.261384 -1.494116 1.290761 -1.315641 1.290698 -1.344269 1.183714 -1.410333 1.226048 -1.234944 1.173318 -1.327782 1.059299 -1.379933 1.122086 -1.211715 0.973916 -1.385316 0.8521079 -1.413766 0.9458487 -1.261846 0.6376432 -1.498092 0.5129818 -1.488801 0.646837 -1.372107 0.1838923 -1.565758 0.07153602 -1.51042 0.2386743 -1.452261 -0.2298777 -1.523261 -0.3152611 -1.430833 -0.1385833 -1.436818 -0.5027727 -1.433074 -0.5586997 -1.319213 -0.390982 -1.376111 -0.5840092 -1.395972 
-0.6211036 -1.273355 -0.46464 -1.357868 -0.5563514 -1.421542 -0.5834305 -1.29508 -0.4343198 -1.39348 -0.4616828 -1.491284 -0.4846456 -1.363042 -0.3388101 -1.467318 -0.4118108 -1.569851 -0.4278143 -1.44006 -0.2878395 -1.553096 -0.4260027 -1.654669 -0.4312567 -1.523949 -0.3011132 -1.64917 -0.4182214 -1.763313 -0.4153333 -1.632897 -0.2932548 -1.766327 -0.3883586 -1.89254 -0.3796735 -1.763145 -0.2636607 -1.901552 -0.3315813 -2.038385 -0.3193945 -1.910318 -0.2071768 -2.050931 0 -6.915487 -0.08838835 -6.826028 0.08838835 -6.826028 1.228002 -1.676741 1.155767 -1.572369 1.257782 -1.498465 1.135869 -1.584074 1.071577 -1.474235 1.178776 -1.40836 1.017145 -1.523842 0.9644486 -1.407844 1.077798 -1.353916 0.8527091 -1.502635 0.8175027 -1.380256 0.9374422 -1.344333 0.61774 -1.515341 0.6085796 -1.388548 0.7332435 -1.379231 0.3037108 -1.528711 0.3293753 -1.404525 0.4517123 -1.430577 -0.03151392 -1.494535 0.03053846 -1.384338 0.1390488 -1.447355 -0.3053751 -1.412568 -0.2146477 -1.324929 -0.128662 -1.417401 -0.4865482 -1.326226 -0.3777615 -1.263069 -0.3161949 -1.374666 -0.5418615 -1.289253 -0.4248033 -1.243906 -0.3809584 -1.364973 -0.514841 -1.307403 -0.3942554 -1.273079 -0.3613296 -1.398786 -0.4319968 -1.368678 -0.3100071 -1.340088 -0.2827397 -1.467995 -0.3861581 -1.443674 -0.262654 -1.423424 -0.2433737 -1.55314 -0.3989999 -1.527718 -0.2742371 -1.519647 -0.2665408 -1.650487 -0.3987429 -1.635046 -0.2737545 -1.63682 -0.2754528 -1.767358 -0.3859993 -1.762323 -0.2613318 -1.771775 -0.2704427 -1.901111 -0.358669 -1.905643 -0.2345314 -1.92072 -0.2491896 -2.048398 1.197523 -1.538551 1.092834 -1.608616 1.129221 -1.43116 1.159745 -1.420697 1.051732 -1.485228 1.09683 -1.309908 1.085165 -1.349585 0.9720895 -1.404085 1.031884 -1.233925 0.956753 -1.335965 0.8373062 -1.373494 0.9199095 -1.214297 0.7421016 -1.381778 0.6174961 -1.391845 0.7321788 -1.255356 0.4181044 -1.448559 0.2963711 -1.419817 0.4464948 -1.32532 0.05416387 -1.460652 -0.05072019 -1.391768 0.1221663 -1.354409 -0.2647848 -1.407727 -0.3421452 -1.307928 -0.1665994 -1.329095 -0.4324127 -1.350613 -0.4851181 -1.234568 -0.3190675 -1.296653 -0.4604946 -1.345686 -0.4979292 -1.222486 -0.3412317 -1.307015 -0.4055289 -1.389376 -0.4349314 -1.262798 -0.2840362 -1.358743 -0.3785988 -1.451156 -0.39847 -1.321894 -0.2551884 -1.430343 -0.3996408 -1.526387 -0.4072887 -1.395469 -0.274875 -1.518362 -0.3976886 -1.627188 -0.3960658 -1.496132 -0.2726992 -1.62889 -0.3744738 -1.750454 -0.3661385 -1.620171 -0.249752 -1.759161 -0.3280546 -1.892348 -0.3153231 -1.763317 -0.2037047 -1.905559 0.998467 -1.663633 0.9360628 -1.552456 1.044371 -1.488398 0.9193236 -1.561954 0.8655585 -1.446057 0.9784049 -1.390839 0.806011 -1.493716 0.7653234 -1.372564 0.8835162 -1.330858 0.6443446 -1.459603 0.6232105 -1.33372 0.746411 -1.312126 0.4211944 -1.447397 0.427705 -1.320219 0.5525353 -1.326852 0.1503493 -1.425778 0.1904901 -1.305319 0.3088696 -1.346165 -0.1042652 -1.371196 -0.03245137 -1.26695 0.06986078 -1.340121 -0.3141606 -1.294155 -0.2175561 -1.212942 -0.1382303 -1.311845 -0.4233776 -1.236982 -0.3125819 -1.177305 -0.2547085 -1.291553 -0.4354957 -1.228529 -0.3180961 -1.183902 -0.2751737 -1.305964 -0.3839711 -1.265863 -0.2636298 -1.230457 -0.2298221 -1.356488 -0.3621889 -1.324155 -0.2393096 -1.300062 -0.2163817 -1.429183 -0.3786119 -1.398335 -0.2540111 -1.387839 -0.2440285 -1.518848 -0.3809102 -1.497887 -0.255911 -1.498365 -0.2563658 -1.62954 -0.3708212 -1.619618 -0.2461224 -1.628677 -0.2547956 -1.75891 -0.3478733 -1.759741 -0.2237666 -1.775205 -0.2386844 -1.903856 1.009893 -1.505881 0.9002431 
-1.567614 0.9498766 -1.393094 0.9741595 -1.392875 0.8611587 -1.447777 0.9207197 -1.276783 0.8963768 -1.325833 0.77856 -1.368589 0.8546132 -1.205214 0.760894 -1.310492 0.6379205 -1.333343 0.7384774 -1.185137 0.5458449 -1.337757 0.4210469 -1.33054 0.552949 -1.210985 0.2594433 -1.364884 0.142522 -1.320035 0.3036518 -1.246267 -0.02463472 -1.350424 -0.1219625 -1.270743 0.05380187 -1.251552 -0.258103 -1.302808 -0.3288754 -1.197615 -0.1550677 -1.230553 -0.3498087 -1.276142 -0.3999863 -1.158311 -0.235322 -1.224498 -0.3401009 -1.29505 -0.3776306 -1.171223 -0.2208679 -1.256074 -0.346493 -1.338285 -0.3707332 -1.210071 -0.2238658 -1.312941 -0.3761127 -1.402789 -0.386298 -1.272044 -0.2515283 -1.3921 -0.3808088 -1.49463 -0.3803618 -1.363242 -0.2558096 -1.4951 -0.3641532 -1.610822 -0.355961 -1.479901 -0.2394219 -1.619421 -0.3264271 -1.747589 -0.31295 -1.617765 -0.2021557 -1.761668 0.7998601 -1.614895 0.7465361 -1.498477 0.8595916 -1.443568 0.7293096 -1.5089 0.6855804 -1.388371 0.8026819 -1.343361 0.6216671 -1.434128 0.5925483 -1.309329 0.7141093 -1.279435 0.4682082 -1.387296 0.4603795 -1.259641 0.5851341 -1.25163 0.2675564 -1.353613 0.2880713 -1.227757 0.4113763 -1.248696 0.04739163 -1.30997 0.09946985 -1.194043 0.2131046 -1.247172 -0.1455312 -1.249499 -0.06540177 -1.151394 0.03053716 -1.233333 -0.2949316 -1.186563 -0.1937916 -1.111026 -0.1203362 -1.215032 -0.3487687 -1.156611 -0.2363898 -1.099893 -0.1816543 -1.216343 -0.3322274 -1.171055 -0.2143725 -1.12756 -0.1727166 -1.25062 -0.3393187 -1.211309 -0.217288 -1.182903 -0.1902046 -1.310894 -0.3607585 -1.274286 -0.2363784 -1.261212 -0.2239455 -1.392008 -0.3664679 -1.364736 -0.2414699 -1.363989 -0.2407594 -1.495492 -0.3594197 -1.479517 -0.2347064 -1.488396 -0.2431674 -1.619275 -0.3405108 -1.61489 -0.2164708 -1.631027 -0.231933 -1.760474 0.8436214 -1.449347 0.7300168 -1.503112 0.7914773 -1.332213 0.8076558 -1.341803 0.6907067 -1.387207 0.7635206 -1.221492 0.7276419 -1.277264 0.6063749 -1.308329 0.6973218 -1.153015 0.5912693 -1.256717 0.4665519 -1.265287 0.5828693 -1.129477 0.390017 -1.264437 0.2670549 -1.24157 0.4124963 -1.139353 0.153962 -1.264609 0.04273672 -1.206604 0.2110053 -1.15151 -0.06977383 -1.241365 -0.1593939 -1.15256 0.01736543 -1.150031 -0.223245 -1.208894 -0.2885831 -1.099605 -0.1166808 -1.141885 -0.2670918 -1.206948 -0.3145175 -1.087334 -0.151438 -1.157899 -0.3135276 -1.232103 -0.3429221 -1.105552 -0.1920329 -1.201485 -0.3540011 -1.284748 -0.3670262 -1.154573 -0.2296815 -1.27111 -0.3663865 -1.366591 -0.3671038 -1.235207 -0.2413886 -1.365837 -0.3563626 -1.474772 -0.3481392 -1.343524 -0.2316334 -1.483425 -0.326243 -1.605437 -0.3118087 -1.475081 -0.2020791 -1.620591 0.6294298 -1.540862 0.5844705 -1.420417 0.7011053 -1.373989 0.5654055 -1.432624 0.5311603 -1.308653 0.6513778 -1.273339 0.4649874 -1.352141 0.4468513 -1.224995 0.5705287 -1.20635 0.3250538 -1.294024 0.3295721 -1.166024 0.4544904 -1.170654 0.1539309 -1.244634 0.1867804 -1.121306 0.3073868 -1.154897 -0.01519026 -1.190742 0.04680062 -1.079727 0.1553461 -1.143128 -0.1619835 -1.13306 -0.0746572 -1.04127 0.0147806 -1.130893 -0.2549097 -1.089731 -0.1502093 -1.019217 -0.08192448 -1.127335 -0.278806 -1.083939 -0.1645969 -1.031123 -0.1137907 -1.149849 -0.3160631 -1.105981 -0.1952313 -1.07254 -0.1632206 -1.19877 -0.3443119 -1.1564 -0.2202393 -1.140454 -0.2050414 -1.270628 -0.354348 -1.236552 -0.229362 -1.234581 -0.2274884 -1.366071 -0.3506536 -1.343247 -0.2259385 -1.352121 -0.2343738 -1.483336 -0.3354731 -1.472633 -0.2115368 -1.489698 -0.2278088 -1.619677 0.6975158 -1.37477 0.5807718 -1.420923 
0.652841 -1.254164 0.6609006 -1.272111 0.5409236 -1.308235 0.6258217 -1.148561 0.5811047 -1.208486 0.4575786 -1.228107 0.561966 -1.081844 0.4506672 -1.181236 0.3257631 -1.176238 0.4555621 -1.053702 0.2737768 -1.172896 0.1540942 -1.136149 0.3098474 -1.050969 0.08907621 -1.159466 -0.01601414 -1.090489 0.1567594 -1.052366 -0.08406762 -1.137688 -0.1662469 -1.041366 0.01012135 -1.053647 -0.1895687 -1.123385 -0.2485357 -1.009916 -0.0793512 -1.062678 -0.2773688 -1.133333 -0.3130973 -1.009234 -0.1575837 -1.096318 -0.3319934 -1.172906 -0.3483634 -1.043713 -0.2080699 -1.15584 -0.3535813 -1.243657 -0.3555243 -1.112614 -0.2285964 -1.24162 -0.3505083 -1.342999 -0.3420852 -1.211765 -0.2257924 -1.351862 -0.3273131 -1.466799 -0.311675 -1.336247 -0.2032951 -1.483261 0.4844443 -1.448939 0.4471793 -1.325433 0.5664954 -1.286859 0.4264853 -1.339354 0.4011452 -1.212941 0.5235498 -1.186771 0.3354385 -1.254023 0.3276269 -1.125631 0.4523826 -1.117591 0.2133529 -1.187148 0.2292145 -1.059951 0.353204 -1.076223 0.07514885 -1.128574 0.1188489 -1.008621 0.2359613 -1.053381 -0.04880391 -1.073445 0.02177607 -0.9676859 0.1249432 -1.040039 -0.1566271 -1.025494 -0.06313427 -0.9400836 0.01983617 -1.036326 -0.2257111 -1.004391 -0.1167095 -0.9411364 -0.05552403 -1.053825 -0.2906645 -1.008954 -0.1716057 -0.9694 -0.1335269 -1.093073 -0.3282557 -1.045272 -0.2046282 -1.025979 -0.1861555 -1.1551 -0.3438133 -1.113907 -0.218852 -1.110646 -0.2157449 -1.241781 -0.3438494 -1.21156 -0.2191446 -1.220593 -0.22773 -1.351801 -0.3321735 -1.334022 -0.2083856 -1.352284 -0.225751 -1.482459 0.5704556 -1.286613 0.4512459 -1.325514 0.5328518 -1.16329 0.5340271 -1.187919 0.4118317 -1.215049 0.5076965 -1.062012 0.4576341 -1.124209 0.3329131 -1.13277 0.449287 -0.9962985 0.3392624 -1.0908 0.2154686 -1.073099 0.356586 -0.9643149 0.1928036 -1.072227 0.07739852 -1.023232 0.2408314 -0.9544991 0.04940621 -1.056495 -0.0489011 -0.9776651 0.1266116 -0.9561193 -0.1106386 -1.043295 -0.1825003 -0.9384968 -0.008360092 -0.9696627 -0.2347126 -1.042635 -0.2785392 -0.9221296 -0.1176475 -0.9975204 -0.3085726 -1.067812 -0.3290889 -0.9400467 -0.1852677 -1.046554 -0.3417171 -1.126143 -0.3450322 -0.9957485 -0.216761 -1.122683 -0.3462599 -1.215816 -0.3374525 -1.084941 -0.2215705 -1.225061 -0.3296123 -1.332175 -0.3124627 -1.20181 -0.2057943 -1.350231 0.3623551 -1.344578 0.3321589 -1.218801 0.4534568 -1.18749 0.3109498 -1.23413 0.2939396 -1.106118 0.4177768 -1.088534 0.2314524 -1.145112 0.2333044 -1.016401 0.3582907 -1.018308 0.1301645 -1.072697 0.1564377 -0.9472252 0.2786454 -0.9742 0.02602969 -1.011351 0.07931327 -0.8953948 0.1923878 -0.9500364 -0.06115461 -0.964909 0.01716746 -0.8649927 0.1145874 -0.9453217 -0.1727059 -0.932933 -0.07110502 -0.8581164 0.001711571 -0.9625079 -0.2606169 -0.9211282 -0.1443238 -0.8738388 -0.09848822 -0.9938206 -0.3114455 -0.941425 -0.1884954 -0.9180266 -0.1659509 -1.045634 -0.3342914 -0.9970728 -0.2093723 -0.992377 -0.2048761 -1.122841 -0.3386044 -1.084791 -0.2139234 -1.094157 -0.2228477 -1.225014 -0.330304 -1.199653 -0.2067248 -1.219421 -0.225518 -1.349409 0.4613326 -1.188433 0.3402195 -1.220452 0.4304036 -1.063054 0.4266465 -1.092905 0.3029381 -1.111374 0.4087235 -0.9654341 0.356929 -1.02895 0.2319451 -1.026896 0.3589316 -0.9007727 0.2549601 -0.9917348 0.1333546 -0.9621634 0.2838929 -0.8674451 0.1296776 -0.9732378 0.01994128 -0.912168 0.1895353 -0.8612795 -0.01999307 -0.967791 -0.1066996 -0.8760012 0.07004567 -0.8793982 -0.1802527 -0.9604786 -0.2348285 -0.8454946 -0.06779613 -0.9046763 -0.2815562 -0.9701073 -0.3075006 -0.8443162 -0.1592783 
-0.9434175 -0.3301358 -1.014174 -0.335088 -0.8846851 -0.205234 -1.00904 -0.3434747 -1.093186 -0.3340562 -0.9629845 -0.2188301 -1.103024 -0.3332488 -1.201661 -0.3141842 -1.071893 -0.2097112 -1.221687 0.2684157 -1.231046 0.2440142 -1.103837 0.3666094 -1.078517 0.2236448 -1.12054 0.2137411 -0.9917379 0.3383481 -0.9815007 0.162577 -1.029892 0.1722081 -0.9016288 0.2968365 -0.9115408 0.09316774 -0.9586109 0.1266807 -0.8351693 0.2471044 -0.869522 0.008441243 -0.9078061 0.06954738 -0.7963718 0.1785934 -0.8588163 -0.1058256 -0.8705961 -0.01586498 -0.7819489 0.07092263 -0.8738371 -0.2218155 -0.8436546 -0.1101176 -0.7861761 -0.05400534 -0.9005934 -0.2922599 -0.8455255 -0.1704083 -0.8168063 -0.1425302 -0.9423344 -0.3252529 -0.8861149 -0.2004049 -0.8797225 -0.1942435 -1.009252 -0.3346946 -0.9628853 -0.2100554 -0.9727987 -0.2195461 -1.102989 -0.329732 -1.06968 -0.2064486 -1.091348 -0.2270935 -1.220737 0.3760665 -1.0816 0.2536675 -1.107851 0.3506998 -0.9549324 0.3449005 -0.9896779 0.2203546 -1.000634 0.3342551 -0.8614946 0.2875604 -0.928182 0.163024 -0.9171736 0.2983165 -0.8007251 0.2014955 -0.8973072 0.08275679 -0.8575188 0.2405611 -0.7763716 0.07418453 -0.8902909 -0.0276462 -0.8166645 0.1466794 -0.7868707 -0.1048875 -0.886262 -0.1741306 -0.7805221 -0.0008183272 -0.8159073 -0.247151 -0.8807869 -0.2806582 -0.757835 -0.1267257 -0.8465767 -0.3180205 -0.9078154 -0.3250683 -0.7794222 -0.1932193 -0.9005646 -0.3421839 -0.9747062 -0.3318461 -0.8454428 -0.2176122 -0.9854334 -0.3384762 -1.074921 -0.3169417 -0.9461743 -0.2153451 -1.097438 0.2549773 -1.107692 0.2294795 -0.9810589 0.3518514 -0.9546731 0.2135881 -1.001223 0.2037015 -0.8729382 0.3283099 -0.8627599 0.1564225 -0.9172583 0.1679857 -0.7898303 0.2924497 -0.8016688 0.07536409 -0.8564793 0.1153538 -0.7357886 0.2337845 -0.7765414 -0.03148288 -0.8140575 0.04144498 -0.7108691 0.142966 -0.7849947 -0.1669433 -0.7777268 -0.06345155 -0.7064001 0.006651582 -0.8116982 -0.2678802 -0.7587769 -0.1479668 -0.7227005 -0.1126712 -0.8452669 -0.3160759 -0.7810234 -0.1913518 -0.7724797 -0.1830522 -0.9008748 -0.33204 -0.8454054 -0.2074703 -0.8561581 -0.2178328 -0.9854178 -0.3304583 -0.9438066 -0.2076051 -0.9679082 -0.2306723 -1.096271 0.3664599 -0.958926 0.2444463 -0.986922 0.3392991 -0.8331601 0.3372216 -0.8735619 0.2127087 -0.8848487 0.3261971 -0.7460862 0.2778008 -0.8236601 0.1535394 -0.8098557 0.2913698 -0.697243 0.1669013 -0.8106354 0.05185492 -0.7611285 0.215782 -0.6941151 -0.00390242 -0.8137504 -0.09189966 -0.7239861 0.08487531 -0.7247753 -0.1975909 -0.8014717 -0.2424431 -0.6831905 -0.08091492 -0.7560024 -0.3040514 -0.8073019 -0.3140164 -0.6801446 -0.1794492 -0.7971326 -0.3426422 -0.859557 -0.3309243 -0.7314286 -0.2181927 -0.8716213 -0.3457447 -0.9511024 -0.3209413 -0.8238222 -0.2232302 -0.9768705 0.2616341 -0.984399 0.2325637 -0.8591591 0.3541363 -0.8292117 0.2173668 -0.8843189 0.2057534 -0.756921 0.3302127 -0.7450333 0.1501347 -0.80995 0.1641752 -0.6835658 0.2883841 -0.6978521 0.046623 -0.7601022 0.09622182 -0.6438471 0.2109605 -0.6941015 -0.09179952 -0.721696 -0.003032872 -0.6326517 0.08497555 -0.7224631 -0.2324621 -0.6835804 -0.1163817 -0.6365316 -0.07000987 -0.7543066 -0.3057942 -0.6819755 -0.1813041 -0.670459 -0.1700248 -0.797566 -0.3307175 -0.7314801 -0.2062651 -0.7435132 -0.2179526 -0.871646 -0.3326119 -0.8212152 -0.2104103 -0.8485236 -0.2367116 -0.9754046 0.3764999 -0.83275 0.2556539 -0.8655065 0.3445432 -0.7088796 0.3431137 -0.7562451 0.2188461 -0.7699927 0.3296025 -0.6298032 0.2666017 -0.7245951 0.1428263 -0.7069426 0.2840558 -0.5994127 0.1087109 -0.7339106 
[added asset geometry payload: a whitespace-separated floating-point coordinate array followed by triangle/face index arrays; raw numeric data, not human-readable]


[second index array of the added asset: mesh index values interleaved with a running counter (0, 1, 2, ...); raw numeric data, not human-readable]
743 2134 745 2135 679 2136 811 2137 677 2138 619 2139 745 2140 747 2141 701 2142 813 2143 679 2144 681 2145 747 2146 815 2147 683 2148 681 2149 817 2150 685 2151 683 2152 819 2153 687 2154 685 2155 821 2156 689 2157 687 2158 823 2159 691 2160 689 2161 825 2162 693 2163 691 2164 827 2165 695 2166 693 2167 829 2168 697 2169 695 2170 831 2171 699 2172 697 2173 833 2174 725 2175 699 2176 835 2177 749 2178 837 2179 701 2180 751 2181 749 2182 621 2183 753 2184 751 2185 623 2186 755 2187 753 2188 625 2189 757 2190 755 2191 627 2192 759 2193 757 2194 629 2195 761 2196 759 2197 631 2198 763 2199 761 2200 633 2201 765 2202 763 2203 635 2204 767 2205 765 2206 637 2207 769 2208 767 2209 639 2210 839 2211 769 2212 703 2213 841 2214 703 2215 705 2216 843 2217 705 2218 707 2219 845 2220 707 2221 709 2222 847 2223 709 2224 711 2225 849 2226 711 2227 713 2228 851 2229 713 2230 715 2231 853 2232 715 2233 717 2234 855 2235 717 2236 719 2237 857 2238 719 2239 721 2240 859 2241 721 2242 723 2243 861 2244 723 2245 771 2246 771 2247 641 2248 773 2249 773 2250 643 2251 775 2252 775 2253 645 2254 777 2255 777 2256 647 2257 779 2258 779 2259 649 2260 781 2261 781 2262 651 2263 783 2264 783 2265 653 2266 785 2267 785 2268 655 2269 787 2270 787 2271 657 2272 789 2273 789 2274 659 2275 791 2276 791 2277 725 2278 863 2279 793 2280 865 2281 727 2282 795 2283 793 2284 661 2285 727 2286 867 2287 729 2288 797 2289 795 2290 663 2291 729 2292 869 2293 731 2294 799 2295 797 2296 665 2297 731 2298 871 2299 733 2300 801 2301 799 2302 667 2303 733 2304 873 2305 735 2306 803 2307 801 2308 669 2309 735 2310 875 2311 737 2312 805 2313 803 2314 671 2315 737 2316 877 2317 739 2318 807 2319 805 2320 673 2321 739 2322 879 2323 741 2324 809 2325 807 2326 675 2327 741 2328 881 2329 743 2330 811 2331 809 2332 677 2333 743 2334 883 2335 745 2336 813 2337 811 2338 679 2339 745 2340 885 2341 747 2342 837 2343 813 2344 701 2345 747 2346 887 2347 815 2348 681 2349 815 2350 817 2351 683 2352 817 2353 819 2354 685 2355 819 2356 821 2357 687 2358 821 2359 823 2360 689 2361 823 2362 825 2363 691 2364 825 2365 827 2366 693 2367 827 2368 829 2369 695 2370 829 2371 831 2372 697 2373 831 2374 833 2375 699 2376 833 2377 835 2378 725 2379 835 2380 863 2381 889 2382 837 2383 749 2384 891 2385 749 2386 751 2387 893 2388 751 2389 753 2390 895 2391 753 2392 755 2393 897 2394 755 2395 757 2396 899 2397 757 2398 759 2399 901 2400 759 2401 761 2402 903 2403 761 2404 763 2405 905 2406 763 2407 765 2408 907 2409 765 2410 767 2411 909 2412 767 2413 769 2414 911 2415 769 2416 839 2417 839 2418 703 2419 841 2420 841 2421 705 2422 843 2423 843 2424 707 2425 845 2426 845 2427 709 2428 847 2429 847 2430 711 2431 849 2432 849 2433 713 2434 851 2435 851 2436 715 2437 853 2438 853 2439 717 2440 855 2441 855 2442 719 2443 857 2444 857 2445 721 2446 859 2447 859 2448 723 2449 861 2450 861 2451 771 2452 913 2453 771 2454 773 2455 915 2456 773 2457 775 2458 917 2459 775 2460 777 2461 919 2462 777 2463 779 2464 921 2465 779 2466 781 2467 923 2468 781 2469 783 2470 925 2471 783 2472 785 2473 927 2474 785 2475 787 2476 929 2477 787 2478 789 2479 931 2480 789 2481 791 2482 933 2483 791 2484 863 2485 935 2486 793 2487 937 2488 865 2489 727 2490 865 2491 867 2492 795 2493 939 2494 793 2495 729 2496 867 2497 869 2498 797 2499 941 2500 795 2501 731 2502 869 2503 871 2504 799 2505 943 2506 797 2507 733 2508 871 2509 873 2510 801 2511 945 2512 799 2513 735 2514 873 2515 875 2516 803 2517 947 2518 801 2519 737 2520 875 2521 877 2522 805 2523 949 2524 803 2525 739 2526 877 2527 879 2528 
807 2529 951 2530 805 2531 741 2532 879 2533 881 2534 809 2535 953 2536 807 2537 743 2538 881 2539 883 2540 811 2541 955 2542 809 2543 745 2544 883 2545 885 2546 813 2547 957 2548 811 2549 747 2550 885 2551 887 2552 837 2553 959 2554 813 2555 815 2556 887 2557 961 2558 817 2559 815 2560 963 2561 819 2562 817 2563 965 2564 821 2565 819 2566 967 2567 823 2568 821 2569 969 2570 825 2571 823 2572 971 2573 827 2574 825 2575 973 2576 829 2577 827 2578 975 2579 831 2580 829 2581 977 2582 833 2583 831 2584 979 2585 835 2586 833 2587 981 2588 863 2589 835 2590 983 2591 889 2592 985 2593 837 2594 891 2595 889 2596 749 2597 893 2598 891 2599 751 2600 895 2601 893 2602 753 2603 897 2604 895 2605 755 2606 899 2607 897 2608 757 2609 901 2610 899 2611 759 2612 903 2613 901 2614 761 2615 905 2616 903 2617 763 2618 907 2619 905 2620 765 2621 909 2622 907 2623 767 2624 911 2625 909 2626 769 2627 987 2628 911 2415 839 2417 989 2629 839 2630 841 2631 991 2632 841 2633 843 2634 993 2635 843 2636 845 2637 995 2638 845 2639 847 2640 997 2641 847 2642 849 2643 999 2644 849 2645 851 2646 1001 2647 851 2648 853 2649 1003 2650 853 2651 855 2652 1005 2653 855 2654 857 2655 1007 2656 857 2657 859 2658 1009 2659 859 2660 861 2661 1011 2662 861 2663 913 2664 913 2665 771 2666 915 2667 915 2668 773 2669 917 2670 917 2671 775 2672 919 2673 919 2674 777 2675 921 2676 921 2677 779 2678 923 2679 923 2680 781 2681 925 2682 925 2683 783 2684 927 2685 927 2686 785 2687 929 2688 929 2689 787 2690 931 2691 931 2692 789 2693 933 2694 933 2695 791 2696 935 2697 935 2698 863 2699 1013 2700 937 2701 1015 2702 865 2703 939 2704 937 2705 793 2706 865 2707 1017 2708 867 2709 941 2710 939 2711 795 2712 867 2713 1019 2714 869 2715 943 2716 941 2717 797 2718 869 2719 1021 2720 871 2721 945 2722 943 2723 799 2724 871 2725 1023 2726 873 2727 947 2728 945 2729 801 2730 873 2731 1025 2732 875 2733 949 2734 947 2735 803 2736 875 2737 1027 2738 877 2739 951 2740 949 2741 805 2742 877 2743 1029 2744 879 2745 953 2746 951 2747 807 2748 879 2749 1031 2750 881 2751 955 2752 953 2753 809 2754 881 2755 1033 2756 883 2757 957 2758 955 2759 811 2760 883 2761 1035 2762 885 2763 959 2764 957 2765 813 2766 885 2767 1037 2768 887 2769 985 2770 959 2771 837 2772 887 2773 1039 2774 961 2775 815 2776 961 2777 963 2778 817 2779 963 2780 965 2781 819 2782 965 2783 967 2784 821 2785 967 2786 969 2787 823 2788 969 2789 971 2790 825 2791 971 2792 973 2793 827 2794 973 2795 975 2796 829 2797 975 2798 977 2799 831 2800 977 2801 979 2802 833 2803 979 2804 981 2805 835 2806 981 2807 983 2808 863 2809 983 2810 1013 2811 1041 2812 985 2813 889 2814 1043 2815 889 2816 891 2817 1045 2818 891 2819 893 2820 1047 2821 893 2822 895 2823 1049 2824 895 2825 897 2826 1051 2827 897 2828 899 2829 1053 2830 899 2831 901 2832 1055 2833 901 2834 903 2835 1057 2836 903 2837 905 2838 1059 2839 905 2840 907 2841 1061 2842 907 2843 909 2844 1063 2845 909 2846 911 2847 1065 2848 911 2415 987 2628 987 2849 839 2850 989 2851 989 2852 841 2853 991 2854 991 2855 843 2856 993 2857 993 2858 845 2859 995 2860 995 2861 847 2862 997 2863 997 2864 849 2865 999 2866 999 2867 851 2868 1001 2869 1001 2870 853 2871 1003 2872 1003 2873 855 2874 1005 2875 1005 2876 857 2877 1007 2878 1007 2879 859 2880 1009 2881 1009 2882 861 2883 1011 2884 1011 2885 913 2886 1067 2887 913 2888 915 2889 1069 2890 915 2891 917 2892 1071 2893 917 2894 919 2895 1073 2896 919 2897 921 2898 1075 2899 921 2900 923 2901 1077 2902 923 2903 925 2904 1079 2905 925 2906 927 2907 1081 2908 927 2909 929 2910 1083 2911 929 2912 931 
2913 1085 2914 931 2915 933 2916 1087 2917 933 2918 935 2919 1089 2920 935 2921 1013 2922 1091 2923 937 2924 1093 2925 1015 2926 865 2927 1015 2928 1017 2929 939 2930 1095 2931 937 2932 867 2933 1017 2934 1019 2935 941 2936 1097 2937 939 2938 869 2939 1019 2940 1021 2941 943 2942 1099 2943 941 2944 871 2945 1021 2946 1023 2947 945 2948 1101 2949 943 2950 873 2951 1023 2952 1025 2953 947 2954 1103 2955 945 2956 875 2957 1025 2958 1027 2959 949 2960 1105 2961 947 2962 877 2963 1027 2964 1029 2965 951 2966 1107 2967 949 2968 879 2969 1029 2970 1031 2971 953 2972 1109 2973 951 2974 881 2975 1031 2976 1033 2977 955 2978 1111 2979 953 2980 883 2981 1033 2982 1035 2983 957 2984 1113 2985 955 2986 885 2987 1035 2988 1037 2989 959 2990 1115 2991 957 2992 887 2993 1037 2994 1039 2995 985 2996 1117 2997 959 2998 961 2999 1039 3000 1119 3001 963 3002 961 3003 1121 3004 965 3005 963 3006 1123 3007 967 3008 965 3009 1125 3010 969 3011 967 3012 1127 3013 971 3014 969 3015 1129 3016 973 3017 971 3018 1131 3019 975 3020 973 3021 1133 3022 977 3023 975 3024 1135 3025 979 3026 977 3027 1137 3028 981 3029 979 3030 1139 3031 983 3032 981 3033 1141 3034 1013 3035 983 3036 1143 3037 1041 3038 1145 3039 985 3040 1043 3041 1041 3042 889 3043 1045 3044 1043 3045 891 3046 1047 3047 1045 3048 893 3049 1049 3050 1047 3051 895 3052 1051 3053 1049 3054 897 3055 1053 3056 1051 3057 899 3058 1055 3059 1053 3060 901 3061 1057 3062 1055 3063 903 3064 1059 3065 1057 3066 905 3067 1061 3068 1059 3069 907 3070 1063 3071 1061 3072 909 3073 1065 2848 1063 3074 911 2415 1147 3075 1065 2848 987 2628 1149 3076 987 3077 989 3078 1151 3079 989 3080 991 3081 1153 3082 991 3083 993 3084 1155 3085 993 3086 995 3087 1157 3088 995 3089 997 3090 1159 3091 997 3092 999 3093 1161 3094 999 3095 1001 3096 1163 3097 1001 3098 1003 3099 1165 3100 1003 3101 1005 3102 1167 3103 1005 3104 1007 3105 1169 3106 1007 3107 1009 3108 1171 3109 1009 3110 1011 3111 1173 3112 1011 3113 1067 3114 1067 3115 913 3116 1069 3117 1069 3118 915 3119 1071 3120 1071 3121 917 3122 1073 3123 1073 3124 919 3125 1075 3126 1075 3127 921 3128 1077 3129 1077 3130 923 3131 1079 3132 1079 3133 925 3134 1081 3135 1081 3136 927 3137 1083 3138 1083 3139 929 3140 1085 3141 1085 3142 931 3143 1087 3144 1087 3145 933 3146 1089 3147 1089 3148 935 3149 1091 3150 1091 3151 1013 3152 1175 3153 1093 3154 1177 3155 1015 3156 1095 3157 1093 3158 937 3159 1015 3160 1179 3161 1017 3162 1097 3163 1095 3164 939 3165 1017 3166 1181 3167 1019 3168 1099 3169 1097 3170 941 3171 1019 3172 1183 3173 1021 3174 1101 3175 1099 3176 943 3177 1021 3178 1185 3179 1023 3180 1103 3181 1101 3182 945 3183 1023 3184 1187 3185 1025 3186 1105 3187 1103 3188 947 3189 1025 3190 1189 3191 1027 3192 1107 3193 1105 3194 949 3195 1027 3196 1191 3197 1029 3198 1109 3199 1107 3200 951 3201 1029 3202 1193 3203 1031 3204 1111 3205 1109 3206 953 3207 1031 3208 1195 3209 1033 3210 1113 3211 1111 3212 955 3213 1033 3214 1197 3215 1035 3216 1115 3217 1113 3218 957 3219 1035 3220 1199 3221 1037 3222 1117 3223 1115 3224 959 3225 1037 3226 1201 3227 1039 3228 1145 3229 1117 3230 985 3231 1039 3232 1203 3233 1119 3234 961 3235 1119 3236 1121 3237 963 3238 1121 3239 1123 3240 965 3241 1123 3242 1125 3243 967 3244 1125 3245 1127 3246 969 3247 1127 3248 1129 3249 971 3250 1129 3251 1131 3252 973 3253 1131 3254 1133 3255 975 3256 1133 3257 1135 3258 977 3259 1135 3260 1137 3261 979 3262 1137 3263 1139 3264 981 3265 1139 3266 1141 3267 983 3268 1141 3269 1143 3270 1013 3271 1143 3272 1175 3273 1147 3274 987 3275 1149 3276 1149 3277 
989 3278 1151 3279 1151 3280 991 3281 1153 3282 1153 3283 993 3284 1155 3285 1155 3286 995 3287 1157 3288 1157 3289 997 3290 1159 3291 1159 3292 999 3293 1161 3294 1161 3295 1001 3296 1163 3297 1163 3298 1003 3299 1165 3300 1165 3301 1005 3302 1167 3303 1167 3304 1007 3305 1169 3306 1169 3307 1009 3308 1171 3309 1171 3310 1011 3311 1173 3312 1173 3313 1067 3314 1205 3315 1067 3316 1069 3317 1207 3318 1069 3319 1071 3320 1209 3321 1071 3322 1073 3323 1211 3324 1073 3325 1075 3326 1213 3327 1075 3328 1077 3329 1215 3330 1077 3331 1079 3332 1217 3333 1079 3334 1081 3335 1219 3336 1081 3337 1083 3338 1221 3339 1083 3340 1085 3341 1223 3342 1085 3343 1087 3344 1225 3345 1087 3346 1089 3347 1227 3348 1089 3349 1091 3350 1229 3351 1091 3352 1175 3353 1231 3354 1093 3355 1233 3356 1177 3357 1015 3358 1177 3359 1179 3360 1095 3361 1235 3362 1093 3363 1017 3364 1179 3365 1181 3366 1097 3367 1237 3368 1095 3369 1019 3370 1181 3371 1183 3372 1099 3373 1239 3374 1097 3375 1021 3376 1183 3377 1185 3378 1101 3379 1241 3380 1099 3381 1023 3382 1185 3383 1187 3384 1103 3385 1243 3386 1101 3387 1025 3388 1187 3389 1189 3390 1105 3391 1245 3392 1103 3393 1027 3394 1189 3395 1191 3396 1107 3397 1247 3398 1105 3399 1029 3400 1191 3401 1193 3402 1109 3403 1249 3404 1107 3405 1031 3406 1193 3407 1195 3408 1111 3409 1251 3410 1109 3411 1033 3412 1195 3413 1197 3414 1113 3415 1253 3416 1111 3417 1035 3418 1197 3419 1199 3420 1115 3421 1255 3422 1113 3423 1037 3424 1199 3425 1201 3426 1117 3427 1257 3428 1115 3429 1039 3430 1201 3431 1203 3432 1145 3433 1259 3434 1117 3435 1119 3436 1203 3437 1261 3438 1121 3439 1119 3440 1263 3441 1123 3442 1121 3443 1265 3444 1125 3445 1123 3446 1267 3447 1127 3448 1125 3449 1269 3450 1129 3451 1127 3452 1271 3453 1131 3454 1129 3455 1273 3456 1133 3457 1131 3458 1275 3459 1135 3460 1133 3461 1277 3462 1137 3463 1135 3464 1279 3465 1139 3466 1137 3467 1281 3468 1141 3469 1139 3470 1283 3471 1143 3472 1141 3473 1285 3474 1175 3475 1143 3476 1287 3477 1289 3478 1147 3479 1149 3480 1291 3481 1149 3482 1151 3483 1293 3484 1151 3485 1153 3486 1295 3487 1153 3488 1155 3489 1297 3490 1155 3491 1157 3492 1299 3493 1157 3494 1159 3495 1301 3496 1159 3497 1161 3498 1303 3499 1161 3500 1163 3501 1305 3502 1163 3503 1165 3504 1307 3505 1165 3506 1167 3507 1309 3508 1167 3509 1169 3510 1311 3511 1169 3512 1171 3513 1313 3514 1171 3515 1173 3516 1315 3517 1173 3518 1205 3519 1205 3520 1067 3521 1207 3522 1207 3523 1069 3524 1209 3525 1209 3526 1071 3527 1211 3528 1211 3529 1073 3530 1213 3531 1213 3532 1075 3533 1215 3534 1215 3535 1077 3536 1217 3537 1217 3538 1079 3539 1219 3540 1219 3541 1081 3542 1221 3543 1221 3544 1083 3545 1223 3546 1223 3547 1085 3548 1225 3549 1225 3550 1087 3551 1227 3552 1227 3553 1089 3554 1229 3555 1229 3556 1091 3557 1231 3558 1231 3559 1175 3560 1317 3561 1233 3562 1319 3563 1177 3564 1235 3565 1233 3566 1093 3567 1177 3568 1321 3569 1179 3570 1237 3571 1235 3572 1095 3573 1179 3574 1323 3575 1181 3576 1239 3577 1237 3578 1097 3579 1181 3580 1325 3581 1183 3582 1241 3583 1239 3584 1099 3585 1183 3586 1327 3587 1185 3588 1243 3589 1241 3590 1101 3591 1185 3592 1329 3593 1187 3594 1245 3595 1243 3596 1103 3597 1187 3598 1331 3599 1189 3600 1247 3601 1245 3602 1105 3603 1189 3604 1333 3605 1191 3606 1249 3607 1247 3608 1107 3609 1191 3610 1335 3611 1193 3612 1251 3613 1249 3614 1109 3615 1193 3616 1337 3617 1195 3618 1253 3619 1251 3620 1111 3621 1195 3622 1339 3623 1197 3624 1255 3625 1253 3626 1113 3627 1197 3628 1341 3629 1199 3630 1257 3631 1255 3632 1115 3633 
1199 3634 1343 3635 1201 3636 1259 3637 1257 3638 1117 3639 1201 3640 1345 3641 1203 3642 1203 3643 1347 3644 1261 3645 1119 3646 1261 3647 1263 3648 1121 3649 1263 3650 1265 3651 1123 3652 1265 3653 1267 3654 1125 3655 1267 3656 1269 3657 1127 3658 1269 3659 1271 3660 1129 3661 1271 3662 1273 3663 1131 3664 1273 3665 1275 3666 1133 3667 1275 3668 1277 3669 1135 3670 1277 3671 1279 3672 1137 3673 1279 3674 1281 3675 1139 3676 1281 3677 1283 3678 1141 3679 1283 3680 1285 3681 1143 3682 1285 3683 1287 3684 1175 3685 1287 3686 1317 3687 1289 3688 1149 3689 1291 3690 1291 3691 1151 3692 1293 3693 1293 3694 1153 3695 1295 3696 1295 3697 1155 3698 1297 3699 1297 3700 1157 3701 1299 3702 1299 3703 1159 3704 1301 3705 1301 3706 1161 3707 1303 3708 1303 3709 1163 3710 1305 3711 1305 3712 1165 3713 1307 3714 1307 3715 1167 3716 1309 3717 1309 3718 1169 3719 1311 3720 1311 3721 1171 3722 1313 3723 1313 3724 1173 3725 1315 3726 1315 3727 1205 3728 1349 3729 1205 3730 1207 3731 1351 3732 1207 3733 1209 3734 1353 3735 1209 3736 1211 3737 1355 3738 1211 3739 1213 3740 1357 3741 1213 3742 1215 3743 1359 3744 1215 3745 1217 3746 1361 3747 1217 3748 1219 3749 1363 3750 1219 3751 1221 3752 1365 3753 1221 3754 1223 3755 1367 3756 1223 3757 1225 3758 1369 3759 1225 3760 1227 3761 1371 3762 1227 3763 1229 3764 1373 3765 1229 3766 1231 3767 1375 3768 1231 3769 1317 3770 1377 3771 1233 3772 1379 3773 1319 3774 1177 3775 1319 3776 1321 3777 1235 3778 1381 3779 1233 3780 1179 3781 1321 3782 1323 3783 1237 3784 1383 3785 1235 3786 1181 3787 1323 3788 1325 3789 1239 3790 1385 3791 1237 3792 1183 3793 1325 3794 1327 3795 1241 3796 1387 3797 1239 3798 1185 3799 1327 3800 1329 3801 1243 3802 1389 3803 1241 3804 1187 3805 1329 3806 1331 3807 1245 3808 1391 3809 1243 3810 1189 3811 1331 3812 1333 3813 1247 3814 1393 3815 1245 3816 1191 3817 1333 3818 1335 3819 1249 3820 1395 3821 1247 3822 1193 3823 1335 3824 1337 3825 1251 3826 1397 3827 1249 3828 1195 3829 1337 3830 1339 3831 1253 3832 1399 3833 1251 3834 1197 3835 1339 3836 1341 3837 1255 3838 1401 3839 1253 3840 1199 3841 1341 3842 1343 3843 1257 3844 1403 3845 1255 3846 1201 3847 1343 3848 1345 3849 1259 3850 1405 3851 1257 3852 1203 3853 1345 3854 1347 3855 1261 3856 1347 3857 1407 3858 1263 3859 1261 3860 1409 3861 1265 3862 1263 3863 1411 3864 1267 3865 1265 3866 1413 3867 1269 3868 1267 3869 1415 3870 1271 3871 1269 3872 1417 3873 1273 3874 1271 3875 1419 3876 1275 3877 1273 3878 1421 3879 1277 3880 1275 3881 1423 3882 1279 3883 1277 3884 1425 3885 1281 3886 1279 3887 1427 3888 1283 3889 1281 3890 1429 3891 1285 3892 1283 3893 1431 3894 1287 3895 1285 3896 1433 3897 1317 3898 1287 3899 1435 3900 1437 3901 1289 3902 1291 3903 1439 3904 1291 3905 1293 3906 1441 3907 1293 3908 1295 3909 1443 3910 1295 3911 1297 3912 1445 3913 1297 3914 1299 3915 1447 3916 1299 3917 1301 3918 1449 3919 1301 3920 1303 3921 1451 3922 1303 3923 1305 3924 1453 3925 1305 3926 1307 3927 1455 3928 1307 3929 1309 3930 1457 3931 1309 3932 1311 3933 1459 3934 1311 3935 1313 3936 1461 3937 1313 3938 1315 3939 1463 3940 1315 3941 1349 3942 1349 3943 1205 3944 1351 3945 1351 3946 1207 3947 1353 3948 1353 3949 1209 3950 1355 3951 1355 3952 1211 3953 1357 3954 1357 3955 1213 3956 1359 3957 1359 3958 1215 3959 1361 3960 1361 3961 1217 3962 1363 3963 1363 3964 1219 3965 1365 3966 1365 3967 1221 3968 1367 3969 1367 3970 1223 3971 1369 3972 1369 3973 1225 3974 1371 3975 1371 3976 1227 3977 1373 3978 1373 3979 1229 3980 1375 3981 1375 3982 1231 3983 1377 3984 1377 3985 1317 3986 1465 3987 1379 3988 1467 
3989 1319 3990 1381 3991 1379 3992 1233 3993 1319 3994 1469 3995 1321 3996 1383 3997 1381 3998 1235 3999 1321 4000 1471 4001 1323 4002 1385 4003 1383 4004 1237 4005 1323 4006 1473 4007 1325 4008 1387 4009 1385 4010 1239 4011 1325 4012 1475 4013 1327 4014 1389 4015 1387 4016 1241 4017 1327 4018 1477 4019 1329 4020 1391 4021 1389 4022 1243 4023 1329 4024 1479 4025 1331 4026 1393 4027 1391 4028 1245 4029 1331 4030 1481 4031 1333 4032 1395 4033 1393 4034 1247 4035 1333 4036 1483 4037 1335 4038 1397 4039 1395 4040 1249 4041 1335 4042 1485 4043 1337 4044 1399 4045 1397 4046 1251 4047 1337 4048 1487 4049 1339 4050 1401 4051 1399 4052 1253 4053 1339 4054 1489 4055 1341 4056 1403 4057 1401 4058 1255 4059 1341 4060 1491 4061 1343 4062 1405 4063 1403 4064 1257 4065 1343 4066 1493 4067 1345 4068 1345 4069 1495 4070 1347 4071 1347 4072 1497 4073 1407 4074 1261 4075 1407 4076 1409 4077 1263 4078 1409 4079 1411 4080 1265 4081 1411 4082 1413 4083 1267 4084 1413 4085 1415 4086 1269 4087 1415 4088 1417 4089 1271 4090 1417 4091 1419 4092 1273 4093 1419 4094 1421 4095 1275 4096 1421 4097 1423 4098 1277 4099 1423 4100 1425 4101 1279 4102 1425 4103 1427 4104 1281 4105 1427 4106 1429 4107 1283 4108 1429 4109 1431 4110 1285 4111 1431 4112 1433 4113 1287 4114 1433 4115 1435 4116 1317 4117 1435 4118 1465 4119 1437 4120 1291 4121 1439 4122 1439 4123 1293 4124 1441 4125 1441 4126 1295 4127 1443 4128 1443 4129 1297 4130 1445 4131 1445 4132 1299 4133 1447 4134 1447 4135 1301 4136 1449 4137 1449 4138 1303 4139 1451 4140 1451 4141 1305 4142 1453 4143 1453 4144 1307 4145 1455 4146 1455 4147 1309 4148 1457 4149 1457 4150 1311 4151 1459 4152 1459 4153 1313 4154 1461 4155 1461 4156 1315 4157 1463 4158 1463 4159 1349 4160 1499 4161 1349 4162 1351 4163 1501 4164 1351 4165 1353 4166 1503 4167 1353 4168 1355 4169 1505 4170 1355 4171 1357 4172 1507 4173 1357 4174 1359 4175 1509 4176 1359 4177 1361 4178 1511 4179 1361 4180 1363 4181 1513 4182 1363 4183 1365 4184 1515 4185 1365 4186 1367 4187 1517 4188 1367 4189 1369 4190 1519 4191 1369 4192 1371 4193 1521 4194 1371 4195 1373 4196 1523 4197 1373 4198 1375 4199 1525 4200 1375 4201 1377 4202 1527 4203 1377 4204 1465 4205 1529 4206 1379 4207 1531 4208 1467 4209 1319 4210 1467 4211 1469 4212 1381 4213 1533 4214 1379 4215 1321 4216 1469 4217 1471 4218 1383 4219 1535 4220 1381 4221 1323 4222 1471 4223 1473 4224 1385 4225 1537 4226 1383 4227 1325 4228 1473 4229 1475 4230 1387 4231 1539 4232 1385 4233 1327 4234 1475 4235 1477 4236 1389 4237 1541 4238 1387 4239 1329 4240 1477 4241 1479 4242 1391 4243 1543 4244 1389 4245 1331 4246 1479 4247 1481 4248 1393 4249 1545 4250 1391 4251 1333 4252 1481 4253 1483 4254 1395 4255 1547 4256 1393 4257 1335 4258 1483 4259 1485 4260 1397 4261 1549 4262 1395 4263 1337 4264 1485 4265 1487 4266 1399 4267 1551 4268 1397 4269 1339 4270 1487 4271 1489 4272 1401 4273 1553 4274 1399 4275 1341 4276 1489 4277 1491 4278 1403 4279 1555 4280 1401 4281 1343 4282 1491 4283 1493 4284 1405 4285 1557 4286 1403 4287 1345 4288 1493 4289 1495 4290 1347 4291 1495 4292 1497 4293 1407 4294 1497 4295 1559 4296 1409 4297 1407 4298 1561 4299 1411 4300 1409 4301 1563 4302 1413 4303 1411 4304 1565 4305 1415 4306 1413 4307 1567 4308 1417 4309 1415 4310 1569 4311 1419 4312 1417 4313 1571 4314 1421 4315 1419 4316 1573 4317 1423 4318 1421 4319 1575 4320 1425 4321 1423 4322 1577 4323 1427 4324 1425 4325 1579 4326 1429 4327 1427 4328 1581 4329 1431 4330 1429 4331 1583 4332 1433 4333 1431 4334 1585 4335 1435 4336 1433 4337 1587 4338 1465 4339 1435 4340 1589 4341 1591 4342 1437 4343 1439 4344 
1593 4345 1439 4346 1441 4347 1595 4348 1441 4349 1443 4350 1597 4351 1443 4352 1445 4353 1599 4354 1445 4355 1447 4356 1601 4357 1447 4358 1449 4359 1603 4360 1449 4361 1451 4362 1605 4363 1451 4364 1453 4365 1607 4366 1453 4367 1455 4368 1609 4369 1455 4370 1457 4371 1611 4372 1457 4373 1459 4374 1613 4375 1459 4376 1461 4377 1615 4378 1461 4379 1463 4380 1617 4381 1463 4382 1499 4383 1499 4384 1349 4385 1501 4386 1501 4387 1351 4388 1503 4389 1503 4390 1353 4391 1505 4392 1505 4393 1355 4394 1507 4395 1507 4396 1357 4397 1509 4398 1509 4399 1359 4400 1511 4401 1511 4402 1361 4403 1513 4404 1513 4405 1363 4406 1515 4407 1515 4408 1365 4409 1517 4410 1517 4411 1367 4412 1519 4413 1519 4414 1369 4415 1521 4416 1521 4417 1371 4418 1523 4419 1523 4420 1373 4421 1525 4422 1525 4423 1375 4424 1527 4425 1527 4426 1377 4427 1529 4428 1529 4429 1465 4430 1619 4431 1531 4432 1621 4433 1467 4434 1533 4435 1531 4436 1379 4437 1467 4438 1623 4439 1469 4440 1535 4441 1533 4442 1381 4443 1469 4444 1625 4445 1471 4446 1537 4447 1535 4448 1383 4449 1471 4450 1627 4451 1473 4452 1539 4453 1537 4454 1385 4455 1473 4456 1629 4457 1475 4458 1541 4459 1539 4460 1387 4461 1475 4462 1631 4463 1477 4464 1543 4465 1541 4466 1389 4467 1477 4468 1633 4469 1479 4470 1545 4471 1543 4472 1391 4473 1479 4474 1635 4475 1481 4476 1547 4477 1545 4478 1393 4479 1481 4480 1637 4481 1483 4482 1549 4483 1547 4484 1395 4485 1483 4486 1639 4487 1485 4488 1551 4489 1549 4490 1397 4491 1485 4492 1641 4493 1487 4494 1553 4495 1551 4496 1399 4497 1487 4498 1643 4499 1489 4500 1555 4501 1553 4502 1401 4503 1489 4504 1645 4505 1491 4506 1557 4507 1555 4508 1403 4509 1491 4510 1647 4511 1493 4512 1493 4513 1649 4514 1495 4515 1495 4516 1651 4517 1497 4518 1497 4519 1653 4520 1559 4521 1407 4522 1559 4523 1561 4524 1409 4525 1561 4526 1563 4527 1411 4528 1563 4529 1565 4530 1413 4531 1565 4532 1567 4533 1415 4534 1567 4535 1569 4536 1417 4537 1569 4538 1571 4539 1419 4540 1571 4541 1573 4542 1421 4543 1573 4544 1575 4545 1423 4546 1575 4547 1577 4548 1425 4549 1577 4550 1579 4551 1427 4552 1579 4553 1581 4554 1429 4555 1581 4556 1583 4557 1431 4558 1583 4559 1585 4560 1433 4561 1585 4562 1587 4563 1435 4564 1587 4565 1589 4566 1465 4567 1589 4568 1619 4569 1591 4570 1439 4571 1593 4572 1593 4573 1441 4574 1595 4575 1595 4576 1443 4577 1597 4578 1597 4579 1445 4580 1599 4581 1599 4582 1447 4583 1601 4584 1601 4585 1449 4586 1603 4587 1603 4588 1451 4589 1605 4590 1605 4591 1453 4592 1607 4593 1607 4594 1455 4595 1609 4596 1609 4597 1457 4598 1611 4599 1611 4600 1459 4601 1613 4602 1613 4603 1461 4604 1615 4605 1615 4606 1463 4607 1617 4608 1617 4609 1499 4610 1655 4611 1499 4612 1501 4613 1657 4614 1501 4615 1503 4616 1659 4617 1503 4618 1505 4619 1661 4620 1505 4621 1507 4622 1663 4623 1507 4624 1509 4625 1665 4626 1509 4627 1511 4628 1667 4629 1511 4630 1513 4631 1669 4632 1513 4633 1515 4634 1671 4635 1515 4636 1517 4637 1673 4638 1517 4639 1519 4640 1675 4641 1519 4642 1521 4643 1677 4644 1521 4645 1523 4646 1679 4647 1523 4648 1525 4649 1681 4650 1525 4651 1527 4652 1683 4653 1527 4654 1529 4655 1685 4656 1529 4657 1619 4658 1687 4659 1531 4660 1689 4661 1621 4662 1467 4663 1621 4664 1623 4665 1533 4666 1691 4667 1531 4668 1469 4669 1623 4670 1625 4671 1535 4672 1693 4673 1533 4674 1471 4675 1625 4676 1627 4677 1537 4678 1695 4679 1535 4680 1473 4681 1627 4682 1629 4683 1539 4684 1697 4685 1537 4686 1475 4687 1629 4688 1631 4689 1541 4690 1699 4691 1539 4692 1477 4693 1631 4694 1633 4695 1543 4696 1701 4697 1541 4698 1479 4699 1633 
4700 1635 4701 1545 4702 1703 4703 1543 4704 1481 4705 1635 4706 1637 4707 1547 4708 1705 4709 1545 4710 1483 4711 1637 4712 1639 4713 1549 4714 1707 4715 1547 4716 1485 4717 1639 4718 1641 4719 1551 4720 1709 4721 1549 4722 1487 4723 1641 4724 1643 4725 1553 4726 1711 4727 1551 4728 1489 4729 1643 4730 1645 4731 1555 4732 1713 4733 1553 4734 1491 4735 1645 4736 1647 4737 1557 4738 1715 4739 1555 4740 1493 4741 1647 4742 1649 4743 1495 4744 1649 4745 1651 4746 1497 4747 1651 4748 1653 4749 1559 4750 1653 4751 1717 4752 1561 4753 1559 4754 1719 4755 1563 4756 1561 4757 1721 4758 1565 4759 1563 4760 1723 4761 1567 4762 1565 4763 1725 4764 1569 4765 1567 4766 1727 4767 1571 4768 1569 4769 1729 4770 1573 4771 1571 4772 1731 4773 1575 4774 1573 4775 1733 4776 1577 4777 1575 4778 1735 4779 1579 4780 1577 4781 1737 4782 1581 4783 1579 4784 1739 4785 1583 4786 1581 4787 1741 4788 1585 4789 1583 4790 1743 4791 1587 4792 1585 4793 1745 4794 1589 4795 1587 4796 1747 4797 1619 4798 1589 4799 1749 4800 1751 4801 1591 4802 1593 4803 1753 4804 1593 4805 1595 4806 1755 4807 1595 4808 1597 4809 1757 4810 1597 4811 1599 4812 1759 4813 1599 4814 1601 4815 1761 4816 1601 4817 1603 4818 1763 4819 1603 4820 1605 4821 1765 4822 1605 4823 1607 4824 1767 4825 1607 4826 1609 4827 1769 4828 1609 4829 1611 4830 1771 4831 1611 4832 1613 4833 1773 4834 1613 4835 1615 4836 1775 4837 1615 4838 1617 4839 1777 4840 1617 4841 1655 4842 1655 4843 1499 4844 1657 4845 1657 4846 1501 4847 1659 4848 1659 4849 1503 4850 1661 4851 1661 4852 1505 4853 1663 4854 1663 4855 1507 4856 1665 4857 1665 4858 1509 4859 1667 4860 1667 4861 1511 4862 1669 4863 1669 4864 1513 4865 1671 4866 1671 4867 1515 4868 1673 4869 1673 4870 1517 4871 1675 4872 1675 4873 1519 4874 1677 4875 1677 4876 1521 4877 1679 4878 1679 4879 1523 4880 1681 4881 1681 4882 1525 4883 1683 4884 1683 4885 1527 4886 1685 4887 1685 4888 1529 4889 1687 4890 1687 4891 1619 4892 1779 4893 1689 4894 1781 4895 1621 4896 1691 4897 1689 4898 1531 4899 1621 4900 1783 4901 1623 4902 1693 4903 1691 4904 1533 4905 1623 4906 1785 4907 1625 4908 1695 4909 1693 4910 1535 4911 1625 4912 1787 4913 1627 4914 1697 4915 1695 4916 1537 4917 1627 4918 1789 4919 1629 4920 1699 4921 1697 4922 1539 4923 1629 4924 1791 4925 1631 4926 1701 4927 1699 4928 1541 4929 1631 4930 1793 4931 1633 4932 1703 4933 1701 4934 1543 4935 1633 4936 1795 4937 1635 4938 1705 4939 1703 4940 1545 4941 1635 4942 1797 4943 1637 4944 1707 4945 1705 4946 1547 4947 1637 4948 1799 4949 1639 4950 1709 4951 1707 4952 1549 4953 1639 4954 1801 4955 1641 4956 1711 4957 1709 4958 1551 4959 1641 4960 1803 4961 1643 4962 1713 4963 1711 4964 1553 4965 1643 4966 1805 4967 1645 4968 1715 4969 1713 4970 1555 4971 1645 4972 1807 4973 1647 4974 1647 4975 1809 4976 1649 4977 1649 4978 1811 4979 1651 4980 1651 4981 1813 4982 1653 4983 1653 4984 1815 4985 1717 4986 1559 4987 1717 4988 1719 4989 1561 4990 1719 4991 1721 4992 1563 4993 1721 4994 1723 4995 1565 4996 1723 4997 1725 4998 1567 4999 1725 5000 1727 5001 1569 5002 1727 5003 1729 5004 1571 5005 1729 5006 1731 5007 1573 5008 1731 5009 1733 5010 1575 5011 1733 5012 1735 5013 1577 5014 1735 5015 1737 5016 1579 5017 1737 5018 1739 5019 1581 5020 1739 5021 1741 5022 1583 5023 1741 5024 1743 5025 1585 5026 1743 5027 1745 5028 1587 5029 1745 5030 1747 5031 1589 5032 1747 5033 1749 5034 1619 5035 1749 5036 1779 5037 1751 5038 1593 5039 1753 5040 1753 5041 1595 5042 1755 5043 1755 5044 1597 5045 1757 5046 1757 5047 1599 5048 1759 5049 1759 5050 1601 5051 1761 5052 1761 5053 1603 5054 1763 5055 
1763 5056 1605 5057 1765 5058 1765 5059 1607 5060 1767 5061 1767 5062 1609 5063 1769 5064 1769 5065 1611 5066 1771 5067 1771 5068 1613 5069 1773 5070 1773 5071 1615 5072 1775 5073 1775 5074 1617 5075 1777 5076 1777 5077 1655 5078 1817 5079 1655 5080 1657 5081 1819 5082 1657 5083 1659 5084 1821 5085 1659 5086 1661 5087 1823 5088 1661 5089 1663 5090 1825 5091 1663 5092 1665 5093 1827 5094 1665 5095 1667 5096 1829 5097 1667 5098 1669 5099 1831 5100 1669 5101 1671 5102 1833 5103 1671 5104 1673 5105 1835 5106 1673 5107 1675 5108 1837 5109 1675 5110 1677 5111 1839 5112 1677 5113 1679 5114 1841 5115 1679 5116 1681 5117 1843 5118 1681 5119 1683 5120 1845 5121 1683 5122 1685 5123 1847 5124 1685 5125 1687 5126 1849 5127 1687 5128 1779 5129 1851 5130 1689 5131 1853 5132 1781 5133 1621 5134 1781 5135 1783 5136 1691 5137 1855 5138 1689 5139 1623 5140 1783 5141 1785 5142 1693 5143 1857 5144 1691 5145 1625 5146 1785 5147 1787 5148 1695 5149 1859 5150 1693 5151 1627 5152 1787 5153 1789 5154 1697 5155 1861 5156 1695 5157 1629 5158 1789 5159 1791 5160 1699 5161 1863 5162 1697 5163 1631 5164 1791 5165 1793 5166 1701 5167 1865 5168 1699 5169 1633 5170 1793 5171 1795 5172 1703 5173 1867 5174 1701 5175 1635 5176 1795 5177 1797 5178 1705 5179 1869 5180 1703 5181 1637 5182 1797 5183 1799 5184 1707 5185 1871 5186 1705 5187 1639 5188 1799 5189 1801 5190 1709 5191 1873 5192 1707 5193 1641 5194 1801 5195 1803 5196 1711 5197 1875 5198 1709 5199 1643 5200 1803 5201 1805 5202 1713 5203 1877 5204 1711 5205 1645 5206 1805 5207 1807 5208 1715 5209 1879 5210 1713 5211 1647 5212 1807 5213 1809 5214 1649 5215 1809 5216 1811 5217 1651 5218 1811 5219 1813 5220 1653 5221 1813 5222 1815 5223 1717 5224 1815 5225 1881 5226 1719 5227 1717 5228 1883 5229 1721 5230 1719 5231 1885 5232 1723 5233 1721 5234 1887 5235 1725 5236 1723 5237 1889 5238 1727 5239 1725 5240 1891 5241 1729 5242 1727 5243 1893 5244 1731 5245 1729 5246 1895 5247 1733 5248 1731 5249 1897 5250 1735 5251 1733 5252 1899 5253 1737 5254 1735 5255 1901 5256 1739 5257 1737 5258 1903 5259 1741 5260 1739 5261 1905 5262 1743 5263 1741 5264 1907 5265 1745 5266 1743 5267 1909 5268 1747 5269 1745 5270 1911 5271 1749 5272 1747 5273 1913 5274 1779 5275 1749 5276 1915 5277 1917 5278 1751 5279 1753 5280 1919 5281 1753 5282 1755 5283 1921 5284 1755 5285 1757 5286 1923 5287 1757 5288 1759 5289 1925 5290 1759 5291 1761 5292 1927 5293 1761 5294 1763 5295 1929 5296 1763 5297 1765 5298 1931 5299 1765 5300 1767 5301 1933 5302 1767 5303 1769 5304 1935 5305 1769 5306 1771 5307 1937 5308 1771 5309 1773 5310 1939 5311 1773 5312 1775 5313 1941 5314 1775 5315 1777 5316 1943 5317 1777 5318 1817 5319 1817 5320 1655 5321 1819 5322 1819 5323 1657 5324 1821 5325 1821 5326 1659 5327 1823 5328 1823 5329 1661 5330 1825 5331 1825 5332 1663 5333 1827 5334 1827 5335 1665 5336 1829 5337 1829 5338 1667 5339 1831 5340 1831 5341 1669 5342 1833 5343 1833 5344 1671 5345 1835 5346 1835 5347 1673 5348 1837 5349 1837 5350 1675 5351 1839 5352 1839 5353 1677 5354 1841 5355 1841 5356 1679 5357 1843 5358 1843 5359 1681 5360 1845 5361 1845 5362 1683 5363 1847 5364 1847 5365 1685 5366 1849 5367 1849 5368 1687 5369 1851 5370 1851 5371 1779 5372 1945 5373 1853 5374 1947 5375 1781 5376 1855 5377 1853 5378 1689 5379 1781 5380 1949 5381 1783 5382 1857 5383 1855 5384 1691 5385 1783 5386 1951 5387 1785 5388 1859 5389 1857 5390 1693 5391 1785 5392 1953 5393 1787 5394 1861 5395 1859 5396 1695 5397 1787 5398 1955 5399 1789 5400 1863 5401 1861 5402 1697 5403 1789 5404 1957 5405 1791 5406 1865 5407 1863 5408 1699 5409 1791 5410 1959 
5411 1793 5412 1867 5413 1865 5414 1701 5415 1793 5416 1961 5417 1795 5418 1869 5419 1867 5420 1703 5421 1795 5422 1963 5423 1797 5424 1871 5425 1869 5426 1705 5427 1797 5428 1965 5429 1799 5430 1873 5431 1871 5432 1707 5433 1799 5434 1967 5435 1801 5436 1875 5437 1873 5438 1709 5439 1801 5440 1969 5441 1803 5442 1877 5443 1875 5444 1711 5445 1803 5446 1971 5447 1805 5448 1879 5449 1877 5450 1713 5451 1805 5452 1973 5453 1807 5454 1807 5455 1975 5456 1809 5457 1809 5458 1977 5459 1811 5460 1811 5461 1979 5462 1813 5463 1813 5464 1981 5465 1815 5466 1815 5467 1983 5468 1881 5469 1717 5470 1881 5471 1883 5472 1719 5473 1883 5474 1885 5475 1721 5476 1885 5477 1887 5478 1723 5479 1887 5480 1889 5481 1725 5482 1889 5483 1891 5484 1727 5485 1891 5486 1893 5487 1729 5488 1893 5489 1895 5490 1731 5491 1895 5492 1897 5493 1733 5494 1897 5495 1899 5496 1735 5497 1899 5498 1901 5499 1737 5500 1901 5501 1903 5502 1739 5503 1903 5504 1905 5505 1741 5506 1905 5507 1907 5508 1743 5509 1907 5510 1909 5511 1745 5512 1909 5513 1911 5514 1747 5515 1911 5516 1913 5517 1749 5518 1913 5519 1915 5520 1779 5521 1915 5522 1945 5523 1917 5524 1753 5525 1919 5526 1919 5527 1755 5528 1921 5529 1921 5530 1757 5531 1923 5532 1923 5533 1759 5534 1925 5535 1925 5536 1761 5537 1927 5538 1927 5539 1763 5540 1929 5541 1929 5542 1765 5543 1931 5544 1931 5545 1767 5546 1933 5547 1933 5548 1769 5549 1935 5550 1935 5551 1771 5552 1937 5553 1937 5554 1773 5555 1939 5556 1939 5557 1775 5558 1941 5559 1941 5560 1777 5561 1943 5562 1853 5563 1985 5564 1947 5565 1781 5566 1947 5567 1949 5568 1855 5569 1987 5570 1853 5571 1783 5572 1949 5573 1951 5574 1857 5575 1989 5576 1855 5577 1785 5578 1951 5579 1953 5580 1859 5581 1991 5582 1857 5583 1787 5584 1953 5585 1955 5586 1861 5587 1993 5588 1859 5589 1789 5590 1955 5591 1957 5592 1863 5593 1995 5594 1861 5595 1791 5596 1957 5597 1959 5598 1865 5599 1997 5600 1863 5601 1793 5602 1959 5603 1961 5604 1867 5605 1999 5606 1865 5607 1795 5608 1961 5609 1963 5610 1869 5611 2001 5612 1867 5613 1797 5614 1963 5615 1965 5616 1871 5617 2003 5618 1869 5619 1799 5620 1965 5621 1967 5622 1873 5623 2005 5624 1871 5625 1801 5626 1967 5627 1969 5628 1875 5629 2007 5630 1873 5631 1803 5632 1969 5633 1971 5634 1877 5635 2009 5636 1875 5637 1805 5638 1971 5639 1973 5640 1879 5641 2011 5642 1877 5643 1807 5644 1973 5645 1975 5646 1809 5647 1975 5648 1977 5649 1811 5650 1977 5651 1979 5652 1813 5653 1979 5654 1981 5655 1815 5656 1981 5657 1983 5658 1881 5659 1983 5660 2013 5661 1883 5662 1881 5663 2015 5664 1885 5665 1883 5666 2017 5667 1887 5668 1885 5669 2019 5670 1889 5671 1887 5672 2021 5673 1891 5674 1889 5675 2023 5676 1893 5677 1891 5678 2025 5679 1895 5680 1893 5681 2027 5682 1897 5683 1895 5684 2029 5685 1899 5686 1897 5687 2031 5688 1901 5689 1899 5690 2033 5691 1903 5692 1901 5693 2035 5694 1905 5695 1903 5696 2037 5697 1907 5698 1905 5699 2039 5700 1909 5701 1907 5702 2041 5703 1911 5704 1909 5705 2043 5706 1913 5707 1911 5708 2045 5709 1915 5710 1913 5711 2047 5712 1945 5713 1915 5714 2049 5715 2051 5716 1917 5717 1919 5718 2053 5719 1919 5720 1921 5721 2055 5722 1921 5723 1923 5724 2057 5725 1923 5726 1925 5727 2059 5728 1925 5729 1927 5730 2061 5731 1927 5732 1929 5733 2063 5734 1929 5735 1931 5736 2065 5737 1931 5738 1933 5739 2067 5740 1933 5741 1935 5742 2069 5743 1935 5744 1937 5745 2071 5746 1937 5747 1939 5748 2073 5749 1939 5750 1941 5751 2075 5752 1941 5753 1943 5754 1985 5755 2077 5756 1947 5757 1987 5758 1985 5759 1853 5760 1947 5761 2079 5762 1949 5763 1989 5764 1987 5765 1855 5766 
1949 5767 2081 5768 1951 5769 1991 5770 1989 5771 1857 5772 1951 5773 2083 5774 1953 5775 1993 5776 1991 5777 1859 5778 1953 5779 2085 5780 1955 5781 1995 5782 1993 5783 1861 5784 1955 5785 2087 5786 1957 5787 1997 5788 1995 5789 1863 5790 1957 5791 2089 5792 1959 5793 1999 5794 1997 5795 1865 5796 1959 5797 2091 5798 1961 5799 2001 5800 1999 5801 1867 5802 1961 5803 2093 5804 1963 5805 2003 5806 2001 5807 1869 5808 1963 5809 2095 5810 1965 5811 2005 5812 2003 5813 1871 5814 1965 5815 2097 5816 1967 5817 2007 5818 2005 5819 1873 5820 1967 5821 2099 5822 1969 5823 2009 5824 2007 5825 1875 5826 1969 5827 2101 5828 1971 5829 2011 5830 2009 5831 1877 5832 1971 5833 2103 5834 1973 5835 1973 5836 2105 5837 1975 5838 1975 5839 2107 5840 1977 5841 1977 5842 2109 5843 1979 5844 1979 5845 2111 5846 1981 5847 1981 5848 2113 5849 1983 5850 1983 5851 2115 5852 2013 5853 1881 5854 2013 5855 2015 5856 1883 5857 2015 5858 2017 5859 1885 5860 2017 5861 2019 5862 1887 5863 2019 5864 2021 5865 1889 5866 2021 5867 2023 5868 1891 5869 2023 5870 2025 5871 1893 5872 2025 5873 2027 5874 1895 5875 2027 5876 2029 5877 1897 5878 2029 5879 2031 5880 1899 5881 2031 5882 2033 5883 1901 5884 2033 5885 2035 5886 1903 5887 2035 5888 2037 5889 1905 5890 2037 5891 2039 5892 1907 5893 2039 5894 2041 5895 1909 5896 2041 5897 2043 5898 1911 5899 2043 5900 2045 5901 1913 5902 2045 5903 2047 5904 1915 5905 2047 5906 2049 5907 2051 5908 1919 5909 2053 5910 2053 5911 1921 5912 2055 5913 2055 5914 1923 5915 2057 5916 2057 5917 1925 5918 2059 5919 2059 5920 1927 5921 2061 5922 2061 5923 1929 5924 2063 5925 2063 5926 1931 5927 2065 5928 2065 5929 1933 5930 2067 5931 2067 5932 1935 5933 2069 5934 2069 5935 1937 5936 2071 5937 2071 5938 1939 5939 2073 5940 2073 5941 1941 5942 2075 5943 1985 5944 2117 5945 2077 5946 1947 5947 2077 5948 2079 5949 1987 5950 2119 5951 1985 5952 1949 5953 2079 5954 2081 5955 1989 5956 2121 5957 1987 5958 1951 5959 2081 5960 2083 5961 1991 5962 2123 5963 1989 5964 1953 5965 2083 5966 2085 5967 1993 5968 2125 5969 1991 5970 1955 5971 2085 5972 2087 5973 1995 5974 2127 5975 1993 5976 1957 5977 2087 5978 2089 5979 1997 5980 2129 5981 1995 5982 1959 5983 2089 5984 2091 5985 1999 5986 2131 5987 1997 5988 1961 5989 2091 5990 2093 5991 2001 5992 2133 5993 1999 5994 1963 5995 2093 5996 2095 5997 2003 5998 2135 5999 2001 6000 1965 6001 2095 6002 2097 6003 2005 6004 2137 6005 2003 6006 1967 6007 2097 6008 2099 6009 2007 6010 2139 6011 2005 6012 1969 6013 2099 6014 2101 6015 2009 6016 2141 6017 2007 6018 1971 6019 2101 6020 2103 6021 2011 6022 2143 6023 2009 6024 1973 6025 2103 6026 2105 6027 1975 6028 2105 6029 2107 6030 1977 6031 2107 6032 2109 6033 1979 6034 2109 6035 2111 6036 1981 6037 2111 6038 2113 6039 1983 6040 2113 6041 2115 6042 2013 6043 2115 6044 2145 6045 2015 6046 2013 6047 2147 6048 2017 6049 2015 6050 2149 6051 2019 6052 2017 6053 2151 6054 2021 6055 2019 6056 2153 6057 2023 6058 2021 6059 2155 6060 2025 6061 2023 6062 2157 6063 2027 6064 2025 6065 2159 6066 2029 6067 2027 6068 2161 6069 2031 6070 2029 6071 2163 6072 2033 6073 2031 6074 2165 6075 2035 6076 2033 6077 2167 6078 2037 6079 2035 6080 2169 6081 2039 6082 2037 6083 2171 6084 2041 6085 2039 6086 2173 6087 2043 6088 2041 6089 2175 6090 2045 6091 2043 6092 2177 6093 2047 6094 2045 6095 2179 6096 2049 6097 2047 6098 2181 6099 2183 6100 2051 6101 2053 6102 2185 6103 2053 6104 2055 6105 2187 6106 2055 6107 2057 6108 2189 6109 2057 6110 2059 6111 2191 6112 2059 6113 2061 6114 2193 6115 2061 6116 2063 6117 2195 6118 2063 6119 2065 6120 2197 6121 2065 
6122 2067 6123 2199 6124 2067 6125 2069 6126 2201 6127 2069 6128 2071 6129 2203 6130 2071 6131 2073 6132 2205 6133 2073 6134 2075 6135 2117 6136 2207 6137 2077 6138 2119 6139 2117 6140 1985 6141 2077 6142 2209 6143 2079 6144 2121 6145 2119 6146 1987 6147 2079 6148 2211 6149 2081 6150 2123 6151 2121 6152 1989 6153 2081 6154 2213 6155 2083 6156 2125 6157 2123 6158 1991 6159 2083 6160 2215 6161 2085 6162 2127 6163 2125 6164 1993 6165 2085 6166 2217 6167 2087 6168 2129 6169 2127 6170 1995 6171 2087 6172 2219 6173 2089 6174 2131 6175 2129 6176 1997 6177 2089 6178 2221 6179 2091 6180 2133 6181 2131 6182 1999 6183 2091 6184 2223 6185 2093 6186 2135 6187 2133 6188 2001 6189 2093 6190 2225 6191 2095 6192 2137 6193 2135 6194 2003 6195 2095 6196 2227 6197 2097 6198 2139 6199 2137 6200 2005 6201 2097 6202 2229 6203 2099 6204 2141 6205 2139 6206 2007 6207 2099 6208 2231 6209 2101 6210 2143 6211 2141 6212 2009 6213 2101 6214 2233 6215 2103 6216 2103 6217 2235 6218 2105 6219 2105 6220 2237 6221 2107 6222 2107 6223 2239 6224 2109 6225 2109 6226 2241 6227 2111 6228 2111 6229 2243 6230 2113 6231 2113 6232 2245 6233 2115 6234 2115 6235 2247 6236 2145 6237 2013 6238 2145 6239 2147 6240 2015 6241 2147 6242 2149 6243 2017 6244 2149 6245 2151 6246 2019 6247 2151 6248 2153 6249 2021 6250 2153 6251 2155 6252 2023 6253 2155 6254 2157 6255 2025 6256 2157 6257 2159 6258 2027 6259 2159 6260 2161 6261 2029 6262 2161 6263 2163 6264 2031 6265 2163 6266 2165 6267 2033 6268 2165 6269 2167 6270 2035 6271 2167 6272 2169 6273 2037 6274 2169 6275 2171 6276 2039 6277 2171 6278 2173 6279 2041 6280 2173 6281 2175 6282 2043 6283 2175 6284 2177 6285 2045 6286 2177 6287 2179 6288 2047 6289 2179 6290 2181 6291 2183 6292 2053 6293 2185 6294 2185 6295 2055 6296 2187 6297 2187 6298 2057 6299 2189 6300 2189 6301 2059 6302 2191 6303 2191 6304 2061 6305 2193 6306 2193 6307 2063 6308 2195 6309 2195 6310 2065 6311 2197 6312 2197 6313 2067 6314 2199 6315 2199 6316 2069 6317 2201 6318 2201 6319 2071 6320 2203 6321 2203 6322 2073 6323 2205 6324 2117 6325 2249 6326 2207 6327 2077 6328 2207 6329 2209 6330 2119 6331 2251 6332 2117 6333 2079 6334 2209 6335 2211 6336 2121 6337 2253 6338 2119 6339 2081 6340 2211 6341 2213 6342 2123 6343 2255 6344 2121 6345 2083 6346 2213 6347 2215 6348 2125 6349 2257 6350 2123 6351 2085 6352 2215 6353 2217 6354 2127 6355 2259 6356 2125 6357 2087 6358 2217 6359 2219 6360 2129 6361 2261 6362 2127 6363 2089 6364 2219 6365 2221 6366 2131 6367 2263 6368 2129 6369 2091 6370 2221 6371 2223 6372 2133 6373 2265 6374 2131 6375 2093 6376 2223 6377 2225 6378 2135 6379 2267 6380 2133 6381 2095 6382 2225 6383 2227 6384 2137 6385 2269 6386 2135 6387 2097 6388 2227 6389 2229 6390 2139 6391 2271 6392 2137 6393 2099 6394 2229 6395 2231 6396 2141 6397 2273 6398 2139 6399 2101 6400 2231 6401 2233 6402 2143 6403 2275 6404 2141 6405 2103 6406 2233 6407 2235 6408 2105 6409 2235 6410 2237 6411 2107 6412 2237 6413 2239 6414 2109 6415 2239 6416 2241 6417 2111 6418 2241 6419 2243 6420 2113 6421 2243 6422 2245 6423 2115 6424 2245 6425 2247 6426 2145 6427 2247 6428 2277 6429 2147 6430 2145 6431 2279 6432 2149 6433 2147 6434 2281 6435 2151 6436 2149 6437 2283 6438 2153 6439 2151 6440 2285 6441 2155 6442 2153 6443 2287 6444 2157 6445 2155 6446 2289 6447 2159 6448 2157 6449 2291 6450 2161 6451 2159 6452 2293 6453 2163 6454 2161 6455 2295 6456 2165 6457 2163 6458 2297 6459 2167 6460 2165 6461 2299 6462 2169 6463 2167 6464 2301 6465 2171 6466 2169 6467 2303 6468 2173 6469 2171 6470 2305 6471 2175 6472 2173 6473 2307 6474 2177 6475 2175 6476 2309 6477 
2179 6478 2177 6479 2311 6480 2181 6481 2179 6482 2313 6483 2315 6484 2183 6485 2185 6486 2317 6487 2185 6488 2187 6489 2319 6490 2187 6491 2189 6492 2321 6493 2189 6494 2191 6495 2323 6496 2191 6497 2193 6498 2325 6499 2193 6500 2195 6501 2327 6502 2195 6503 2197 6504 2329 6505 2197 6506 2199 6507 2331 6508 2199 6509 2201 6510 2333 6511 2201 6512 2203 6513 2335 6514 2203 6515 2205 6516 2249 6517 2337 6518 2207 6519 2251 6520 2249 6521 2117 6522 2207 6523 2339 6524 2209 6525 2253 6526 2251 6527 2119 6528 2209 6529 2341 6530 2211 6531 2255 6532 2253 6533 2121 6534 2211 6535 2343 6536 2213 6537 2257 6538 2255 6539 2123 6540 2213 6541 2345 6542 2215 6543 2259 6544 2257 6545 2125 6546 2215 6547 2347 6548 2217 6549 2261 6550 2259 6551 2127 6552 2217 6553 2349 6554 2219 6555 2263 6556 2261 6557 2129 6558 2219 6559 2351 6560 2221 6561 2265 6562 2263 6563 2131 6564 2221 6565 2353 6566 2223 6567 2267 6568 2265 6569 2133 6570 2223 6571 2355 6572 2225 6573 2269 6574 2267 6575 2135 6576 2225 6577 2357 6578 2227 6579 2271 6580 2269 6581 2137 6582 2227 6583 2359 6584 2229 6585 2273 6586 2271 6587 2139 6588 2229 6589 2361 6590 2231 6591 2275 6592 2273 6593 2141 6594 2231 6595 2363 6596 2233 6597 2233 6598 2365 6599 2235 6600 2235 6601 2367 6602 2237 6603 2237 6604 2369 6605 2239 6606 2239 6607 2371 6608 2241 6609 2241 6610 2373 6611 2243 6612 2243 6613 2375 6614 2245 6615 2245 6616 2377 6617 2247 6618 2247 6619 2379 6620 2277 6621 2145 6622 2277 6623 2279 6624 2147 6625 2279 6626 2281 6627 2149 6628 2281 6629 2283 6630 2151 6631 2283 6632 2285 6633 2153 6634 2285 6635 2287 6636 2155 6637 2287 6638 2289 6639 2157 6640 2289 6641 2291 6642 2159 6643 2291 6644 2293 6645 2161 6646 2293 6647 2295 6648 2163 6649 2295 6650 2297 6651 2165 6652 2297 6653 2299 6654 2167 6655 2299 6656 2301 6657 2169 6658 2301 6659 2303 6660 2171 6661 2303 6662 2305 6663 2173 6664 2305 6665 2307 6666 2175 6667 2307 6668 2309 6669 2177 6670 2309 6671 2311 6672 2179 6673 2311 6674 2313 6675 2315 6676 2185 6677 2317 6678 2317 6679 2187 6680 2319 6681 2319 6682 2189 6683 2321 6684 2321 6685 2191 6686 2323 6687 2323 6688 2193 6689 2325 6690 2325 6691 2195 6692 2327 6693 2327 6694 2197 6695 2329 6696 2329 6697 2199 6698 2331 6699 2331 6700 2201 6701 2333 6702 2333 6703 2203 6704 2335 6705 2207 6706 2337 6707 2339 6708 2209 6709 2339 6710 2341 6711 2211 6712 2341 6713 2343 6714 2213 6715 2343 6716 2345 6717 2215 6718 2345 6719 2347 6720 2217 6721 2347 6722 2349 6723 2219 6724 2349 6725 2351 6726 2221 6727 2351 6728 2353 6729 2223 6730 2353 6731 2355 6732 2225 6733 2355 6734 2357 6735 2227 6736 2357 6737 2359 6738 2229 6739 2359 6740 2361 6741 2231 6742 2361 6743 2363 6744 2233 6745 2363 6746 2365 6747 2235 6748 2365 6749 2367 6750 2237 6751 2367 6752 2369 6753 2239 6754 2369 6755 2371 6756 2241 6757 2371 6758 2373 6759 2243 6760 2373 6761 2375 6762 2245 6763 2375 6764 2377 6765 2247 6766 2377 6767 2379 6768 2277 6769 2379 6770 2381 6771 2279 6772 2277 6773 2383 6774 2281 6775 2279 6776 2385 6777 2283 6778 2281 6779 2387 6780 2285 6781 2283 6782 2389 6783 2287 6784 2285 6785 2391 6786 2289 6787 2287 6788 2393 6789 2291 6790 2289 6791 2395 6792 2293 6793 2291 6794 2397 6795 2295 6796 2293 6797 2399 6798 2297 6799 2295 6800 2401 6801 2299 6802 2297 6803 2403 6804 2301 6805 2299 6806 2405 6807 2303 6808 2301 6809 2407 6810 2305 6811 2303 6812 2409 6813 2307 6814 2305 6815 2411 6816 2309 6817 2307 6818 2413 6819 2311 6820 2309 6821 2415 6822 2313 6823 2311 6824 2417 6825 2419 6826 2315 6827 2317 6828 2421 6829 2317 6830 2319 6831 2423 6832 2319 
6833 2321 6834 2425 6835 2321 6836 2323 6837 2427 6838 2323 6839 2325 6840 2429 6841 2325 6842 2327 6843 2431 6844 2327 6845 2329 6846 2433 6847 2329 6848 2331 6849 2435 6850 2331 6851 2333 6852 2437 6853 2333 6854 2335 6855 2337 6856 2439 6857 2339 6858 2339 6859 2441 6860 2341 6861 2341 6862 2443 6863 2343 6864 2343 6865 2445 6866 2345 6867 2345 6868 2447 6869 2347 6870 2347 6871 2449 6872 2349 6873 2349 6874 2451 6875 2351 6876 2351 6877 2453 6878 2353 6879 2353 6880 2455 6881 2355 6882 2355 6883 2457 6884 2357 6885 2357 6886 2459 6887 2359 6888 2359 6889 2461 6890 2361 6891 2361 6892 2463 6893 2363 6894 2363 6895 2465 6896 2365 6897 2365 6898 2467 6899 2367 6900 2367 6901 2469 6902 2369 6903 2369 6904 2471 6905 2371 6906 2371 6907 2473 6908 2373 6909 2373 6910 2475 6911 2375 6912 2375 6913 2477 6914 2377 6915 2377 6916 2479 6917 2379 6918 2379 6919 2481 6920 2381 6921 2277 6922 2381 6923 2383 6924 2279 6925 2383 6926 2385 6927 2281 6928 2385 6929 2387 6930 2283 6931 2387 6932 2389 6933 2285 6934 2389 6935 2391 6936 2287 6937 2391 6938 2393 6939 2289 6940 2393 6941 2395 6942 2291 6943 2395 6944 2397 6945 2293 6946 2397 6947 2399 6948 2295 6949 2399 6950 2401 6951 2297 6952 2401 6953 2403 6954 2299 6955 2403 6956 2405 6957 2301 6958 2405 6959 2407 6960 2303 6961 2407 6962 2409 6963 2305 6964 2409 6965 2411 6966 2307 6967 2411 6968 2413 6969 2309 6970 2413 6971 2415 6972 2311 6973 2415 6974 2417 6975 2419 6976 2317 6977 2421 6978 2421 6979 2319 6980 2423 6981 2423 6982 2321 6983 2425 6984 2425 6985 2323 6986 2427 6987 2427 6988 2325 6989 2429 6990 2429 6991 2327 6992 2431 6993 2431 6994 2329 6995 2433 6996 2433 6997 2331 6998 2435 6999 2435 7000 2333 7001 2437 7002 2339 7003 2439 7004 2441 7005 2341 7006 2441 7007 2443 7008 2343 7009 2443 7010 2445 7011 2345 7012 2445 7013 2447 7014 2347 7015 2447 7016 2449 7017 2349 7018 2449 7019 2451 7020 2351 7021 2451 7022 2453 7023 2353 7024 2453 7025 2455 7026 2355 7027 2455 7028 2457 7029 2357 7030 2457 7031 2459 7032 2359 7033 2459 7034 2461 7035 2361 7036 2461 7037 2463 7038 2363 7039 2463 7040 2465 7041 2365 7042 2465 7043 2467 7044 2367 7045 2467 7046 2469 7047 2369 7048 2469 7049 2471 7050 2371 7051 2471 7052 2473 7053 2373 7054 2473 7055 2475 7056 2375 7057 2475 7058 2477 7059 2377 7060 2477 7061 2479 7062 2379 7063 2479 7064 2481 7065 2381 7066 2481 7067 2483 7068 2383 7069 2381 7070 2485 7071 2385 7072 2383 7073 2487 7074 2387 7075 2385 7076 2489 7077 2389 7078 2387 7079 2491 7080 2391 7081 2389 7082 2493 7083 2393 7084 2391 7085 2495 7086 2395 7087 2393 7088 2497 7089 2397 7090 2395 7091 2499 7092 2399 7093 2397 7094 2501 7095 2401 7096 2399 7097 2503 7098 2403 7099 2401 7100 2505 7101 2405 7102 2403 7103 2507 7104 2407 7105 2405 7106 2509 7107 2409 7108 2407 7109 2511 7110 2411 7111 2409 7112 2513 7113 2413 7114 2411 7115 2515 7116 2415 7117 2413 7118 2517 7119 2417 7120 2415 7121 2519 7122 2521 7123 2419 7124 2421 7125 2523 7126 2421 7127 2423 7128 2525 7129 2423 7130 2425 7131 2527 7132 2425 7133 2427 7134 2529 7135 2427 7136 2429 7137 2531 7138 2429 7139 2431 7140 2533 7141 2431 7142 2433 7143 2535 7144 2433 7145 2435 7146 2537 7147 2435 7148 2437 7149 2439 7150 2539 7151 2441 7152 2441 7153 2541 7154 2443 7155 2443 7156 2543 7157 2445 7158 2445 7159 2545 7160 2447 7161 2447 7162 2547 7163 2449 7164 2449 7165 2549 7166 2451 7167 2451 7168 2551 7169 2453 7170 2453 7171 2553 7172 2455 7173 2455 7174 2555 7175 2457 7176 2457 7177 2557 7178 2459 7179 2459 7180 2559 7181 2461 7182 2461 7183 2561 7184 2463 7185 2463 7186 2563 7187 2465 7188 
+ [COLLADA mesh data omitted: vertex/triangle index arrays and material definitions (ID11, ID12) referencing textures/Vegetation_Grass1.jpg]
diff --git a/wild_visual_navigation_sim/Media/models/textures/Vegetation_Grass1 (copy).jpg b/wild_visual_navigation_sim/Media/models/textures/Vegetation_Grass1 (copy).jpg new file mode 100644 index 00000000..6be377f3 Binary files /dev/null and b/wild_visual_navigation_sim/Media/models/textures/Vegetation_Grass1 (copy).jpg differ diff --git a/wild_visual_navigation_sim/Media/models/textures/Vegetation_Grass1.jpg b/wild_visual_navigation_sim/Media/models/textures/Vegetation_Grass1.jpg new file mode 100644 index 00000000..90775be1 Binary files /dev/null and b/wild_visual_navigation_sim/Media/models/textures/Vegetation_Grass1.jpg differ diff --git a/wild_visual_navigation_sim/Media/models/textures/mtt.png b/wild_visual_navigation_sim/Media/models/textures/mtt.png new file mode 100644 index 00000000..07485fa4 Binary files /dev/null and b/wild_visual_navigation_sim/Media/models/textures/mtt.png differ diff --git a/wild_visual_navigation_sim/Media/models/textures/texture0.jpg b/wild_visual_navigation_sim/Media/models/textures/texture0.jpg new file mode 100644 index 00000000..96adb41c Binary files /dev/null and b/wild_visual_navigation_sim/Media/models/textures/texture0.jpg differ diff --git a/wild_visual_navigation_sim/Media/models/textures/texture0.png b/wild_visual_navigation_sim/Media/models/textures/texture0.png new file mode 100644 index 00000000..23fd62ff Binary files /dev/null and b/wild_visual_navigation_sim/Media/models/textures/texture0.png differ diff --git a/wild_visual_navigation_sim/README.md b/wild_visual_navigation_sim/README.md new file mode 100644 index 00000000..bb92611e --- /dev/null +++ b/wild_visual_navigation_sim/README.md @@ -0,0 +1,20 @@ +# Wild Visual Navigation Sim + +Simulation environment to test Wild Visual Navigation (WVN). We use a modified Clearpath Jackal (adding a camera). 
+ +## Requirements + +```sh +$ sudo apt update +$ sudo apt install -y \ + ros-noetic-jackal-simulator \ + ros-noetic-jackal-desktop \ + ros-noetic-teleop-twist-keyboard \ + ros-noetic-rqt-robot-steering \ +``` + +## Running + +```sh +$ roslaunch wild_visual_navigation_sim sim.launch +``` diff --git a/wild_visual_navigation_sim/launch/sim.launch b/wild_visual_navigation_sim/launch/sim.launch new file mode 100644 index 00000000..68e83fa8 --- /dev/null +++ b/wild_visual_navigation_sim/launch/sim.launch @@ -0,0 +1,34 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/wild_visual_navigation_sim/package.xml b/wild_visual_navigation_sim/package.xml new file mode 100644 index 00000000..921c777c --- /dev/null +++ b/wild_visual_navigation_sim/package.xml @@ -0,0 +1,26 @@ + + + wild_visual_navigation_sim + 0.0.1 + Simulation environment for wild_visual_navigation + Matias Mattamala + Jonas Frey + Matias Mattamala + Jonas Frey + + Proprietary + + catkin + + rospy + nav_msgs + sensor_msgs + std_msgs + wild_visual_navigation_ros + + gazebo_ros + + + + + diff --git a/wild_visual_navigation_sim/worlds/outdoor.world b/wild_visual_navigation_sim/worlds/outdoor.world new file mode 100644 index 00000000..a0bb69ba --- /dev/null +++ b/wild_visual_navigation_sim/worlds/outdoor.world @@ -0,0 +1,314 @@ + + + + + 0.2 0.2 0.2 1 + 1 + 0 + + 0 0 -9.81 + + 1 + 0 0 100 0 -0 0 + 0.8 0.8 0.8 1 + 0.9 0.9 0.9 1 + + 1000 + 0.1 + 0.01 + 0.001 + + 0.2 0.2 -0.9 + + + 1 + + + + + + file://heightmap.png + 30 30 12 + 0 0 -1 + + 10 + __default__ + __default__ + + + 0 + 0 + + + + 10 + + + + + + + + + + + + + + + + + + file://media/materials/textures/dirt_diffusespecular.png + file://media/materials/textures/flat_normal.png + 7 + + + file://media/materials/textures/grass_diffusespecular.png + file://media/materials/textures/flat_normal.png + 3 + + + file://media/materials/textures/fungus_diffusespecular.png + file://media/materials/textures/flat_normal.png + 4 + + + 0 + 0.05 + + + -1 + 7 + + + file://heightmap.png + 30 30 12 + 0 0 -1 + + + + 0 + 0 + 0 + + + + + + true + + 3 3 -1 0 0 0 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + 5 6 -1.0 0.0 0 0.3 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + 1 8 -0.2 0.0 0 0 + + + + file://OliveTree2.dae + 1 1 1 + + + + + + + file://OliveTree2.dae + 1 1 1 + + + false + + + + + + true + + -5 4 -1.0 0 0.1 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + -10 -3 -1.0 0 0 0.3 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + -6 -1 -0.8 0 0 0.5 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + -1 1 -1.0 0.0 0 0.2 + + + + file://OliveTree2.dae + 1 1 1 + + + + + + + file://OliveTree2.dae + 1 1 1 + + + false + + + + + + true + + 9 -4 -0.5 0 0 0.5 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + 6 -0.5 -1.0 0 0 0.5 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + \ No newline at end of file diff --git a/wild_visual_navigation_sim/worlds/scenario.world b/wild_visual_navigation_sim/worlds/scenario.world new file mode 100644 index 00000000..46cfec7b --- /dev/null +++ b/wild_visual_navigation_sim/worlds/scenario.world @@ -0,0 +1,282 @@ 
+ + + + + + 0.5 0.5 0.5 1 + 0.5 0.5 0.5 1 + false + + + 0 0 -9.81 + + + quick + 10 + 1.3 + + + 0.0 + 0.2 + 10 + 0.001 + + + 0.001 + + + 0 20 20 0.1 0.1 0 + 1 1 1 1 + 1 1 1 1 + + 300 + + 0.1 0.1 -1 + false + + + + -15.0 -15.0 0.5 0 0 0 + + + + file://scenario.dae + 1 1 0.4 + + + + + + + file://scenario.dae + 1 1 0.4 + + + false + + + true + + + + + true + + 3 3 0.6 0 0 0 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + 5 6 0.7 0 0 0.3 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + 1 8 0.9 0 0 0 + + + + file://OliveTree2.dae + 1 1 1 + + + + + + + file://OliveTree2.dae + 1 1 1 + + + false + + + + + + true + + -5 4 0.8 0 0 0.1 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + -10 -3 1.1 0 0 0.3 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + -6 -7 1.02 0 0 0.5 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + -1 -6 0.8 0 0 0.2 + + + + file://OliveTree2.dae + 1 1 1 + + + + + + + file://OliveTree2.dae + 1 1 1 + + + false + + + + + + true + + 9 -4 0.5 0 0 0.5 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + true + + 6 -10 0.8 0 0 0.5 + + + + file://OliveTree1.dae + 1 1 1 + + + + + + + file://OliveTree1.dae + 1 1 1 + + + false + + + + + + \ No newline at end of file
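
For reference, the README added above only shows how to launch the simulation; the teleop packages it installs can then be used to drive the Jackal around the new worlds. A minimal sketch, assuming the simulation is already running and that the robot accepts velocity commands on the standard `cmd_vel` twist topic (remap if your setup differs):

```sh
# Terminal 1: start Gazebo with the world and the modified Jackal (from the README)
$ roslaunch wild_visual_navigation_sim sim.launch

# Terminal 2: drive the robot from the keyboard
# (teleop_twist_keyboard publishes geometry_msgs/Twist on cmd_vel by default)
$ rosrun teleop_twist_keyboard teleop_twist_keyboard.py

# Alternatively, use the GUI steering widget installed via ros-noetic-rqt-robot-steering
$ rosrun rqt_robot_steering rqt_robot_steering
```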