diff --git a/.nojekyll b/.nojekyll index 99b79fe..213cc24 100644 --- a/.nojekyll +++ b/.nojekyll @@ -1 +1 @@ -3cda822f \ No newline at end of file +6b17fe80 \ No newline at end of file diff --git a/search.json b/search.json index e73e490..64946f0 100644 --- a/search.json +++ b/search.json @@ -1,4 +1,32 @@ [ + { + "objectID": "polire/gp/tests/GP interpolation.html", + "href": "polire/gp/tests/GP interpolation.html", + "title": "Polire", + "section": "", + "text": "from pykrige import OrdinaryKriging\n\n\nimport pandas as pd\nimport numpy as np\n\n\nok = OrdinaryKriging(data[:,0],data[:,1],data[:,2])\nok.ex\n\n\na,b = ok.execute('grid',x[0],y[:,0])\n\n\nfrom pykrige import OrdinaryKriging\nimport pandas as pd\nimport numpy as np\n\ndef ordinary_kriging(dataset, resolution='standard', coordinate_type='euclidean',verbose='False',method='grid', isvariance = False):\n if coordinate_type == 'latlong_small':\n \"\"\"\n Assume that the Earth is a Sphere, and use polar coordinates\n $| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}$\n \"\"\"\n return \"To be done later\"\n if coordinate_type == 'latlong_large':\n \"\"\"\n Code to be written after understanding all the projections.\n \"\"\"\n return \"To be done later\"\n if coordinate_type==\"euclidean\":\n \n ok = OrdinaryKriging(dataset[:,0],dataset[:,1],dataset[:,2])\n X = dataset[:,0]\n y = dataset[:,1]\n \n if resolution=='high':\n xx,yy = make_grid(X,y,1000)\n \n elif resolution=='low':\n xx,yy = make_grid(X,y,10)\n \n elif resolution=='standard':\n xx,yy = make_grid(X,y,100)\n \n else:\n print('Value Error - Resolution can only be one of \\nhigh, low or standard')\n \n values, variances = ok.execute(method, xx[0], yy[:,0])\n \n if isvariance:\n return values, variances\n else:\n del variances\n return np.array(values)\n\n\nordinary_kriging(data)\n\narray([[129.94984945, 129.7682324 , 129.58820662, ..., 159.34079485,\n 159.99175016, 160.63241067],\n [130.22090025, 130.03615966, 129.8529146 , ..., 159.9575165 ,\n 160.61228126, 161.25625641],\n [130.50105231, 130.31324536, 130.12683652, ..., 160.59265384,\n 161.25084023, 161.8977369 ],\n ...,\n [207.22133238, 207.82739139, 208.44615116, ..., 248.64646661,\n 248.3790241 , 248.11033441],\n [207.92838926, 208.53490708, 209.15376273, ..., 248.91678379,\n 248.65601627, 248.39371596],\n [208.61942088, 209.22595474, 209.84445913, ..., 249.17442481,\n 248.9203453 , 248.66446245]])\n\n\n\nWhat does ok(‘points’) really do?\nSpecifically test when points aren’t really passed - they are let’s say the point of an array\nReturns the diagonal matrix of all these coordinates\n\n\nordinary_kriging(data,method='points')\n\narray([129.94984945, 130.03615966, 130.12683652, 130.22219703,\n 130.32258826, 130.42839089, 130.54002324, 130.65794596,\n 130.7826674 , 130.91474976, 131.05481629, 131.20355964,\n 131.36175158, 131.53025441, 131.71003442, 131.90217771,\n 132.107909 , 132.32861401, 132.56586607, 132.82145795,\n 133.0974399 , 133.39616477, 133.72034153, 134.07309736,\n 134.45804822, 134.87937482, 135.34189663, 135.85112772,\n 136.41328222, 137.03517039, 137.72388496, 138.48612122,\n 139.326921 , 140.24763047, 141.24300526, 142.29757046,\n 143.37881815, 144.38425962, 144.49187978, 143.1202101 ,\n 141.66667134, 140.45686022, 139.66795657, 142.48270308,\n 147.03665055, 151.8487008 , 156.90272514, 162.25791164,\n 168.04938768, 173.63870768, 180.93567147, 190.3440156 ,\n 199.86834472, 208.48375248, 215.75635742, 222.1915652 ,\n 228.08641413, 233.15249702, 
236.89713686, 239.83524192,\n 242.45744315, 244.57483343, 245.52139699, 245.88236757,\n 246.12295211, 246.3306567 , 246.52369882, 246.70598807,\n 246.87792737, 247.03919426, 247.18952217, 247.3288843 ,\n 247.45749059, 247.57573348, 247.68412862, 247.78326467,\n 247.87376505, 247.95626051, 248.03137024, 248.09968963,\n 248.16178271, 248.21817801, 248.26936683, 248.31580309,\n 248.35790422, 248.39605277, 248.43059841, 248.46186013,\n 248.49012851, 248.51566797, 248.53871897, 248.55950011,\n 248.57821004, 248.59502931, 248.61012204, 248.62363741,\n 248.63571111, 248.64646661, 248.65601627, 248.66446245])\n\n\n\ndef make_grid(X,y,res):\n y_min = y.min()-0.2\n y_max = y.max()+0.2\n x_min = X.min()-0.2\n x_max = X.max()+0.2\n x_arr = np.linspace(x_min,x_max,res)\n y_arr = np.linspace(y_min,y_max,res)\n xx,yy = np.meshgrid(x_arr,y_arr) \n return xx,yy\nx, y = make_grid(data[:,0],data[:,1],100)" + }, + { + "objectID": "polire/idw/tests/Numpy+IDWTest.html", + "href": "polire/idw/tests/Numpy+IDWTest.html", + "title": "Polire", + "section": "", + "text": "import numpy as np\n\n\na = np.array([[1,2,3],[4,5,6]])\n\n\na\n\narray([[1, 2, 3],\n [4, 5, 6]])\n\n\n\nb = np.array([[2,3,4],[5,6,9]])\n\n\nb\n\narray([[2, 3, 4],\n [5, 6, 9]])\n\n\n\na\n\narray([[1, 2, 3],\n [4, 5, 6]])\n\n\n\na - b\n\narray([[-1, -1, -1],\n [-1, -1, -3]])\n\n\n\nnp.argmin([np.linalg.norm(a[i] - b[i]) for i in range(len(a))])\n\n1.7320508075688772\n\n\n\nnp.min?\n\n\n\n\"\"\"\nThis is a module for IDW Spatial Interpolation\n\"\"\"\nimport numpy as np\nimport pandas as pd\nfrom copy import deepcopy\nclass idw():\n \"\"\" A class that is declared for performing IDW Interpolation.\n For more information on how this method works, kindly refer to\n https://en.wikipedia.org/wiki/Inverse_distance_weighting\n\n Parameters\n ----------\n exponent : positive float, optional\n The rate of fall of values from source data points.\n Higher the exponent, lower is the value when we move\n across space. Default value is 2.\n resolution: str, optional\n Decides the smoothness of the interpolation. Note that\n interpolation is done over a grid. Higher the resolution\n means more grid cells and more time for interpolation.\n Default value is 'standard'\n coordinate_type: str, optional\n Decides the distance metric to be used, while performing\n interpolation. Euclidean by default. 
\n \"\"\"\n def __init__(self, exponent = 2, resolution = 'standard', coordinate_type='Euclidean'):\n \n self.exponent = exponent\n self.resolution = resolution\n self.coordinate_type = coordinate_type\n self.interpolated_values = None\n self.x_grid = None\n self.y_grid = None\n\n def make_grid(self, x, y, res, offset=0.2):\n\n \"\"\" This function returns the grid to perform interpolation on.\n This function is used inside the fit() attribute of the idw class.\n \n Parameters\n ----------\n x: array-like, shape(n_samples,)\n The first coordinate values of all points where\n ground truth is available\n y: array-like, shape(n_samples,)\n The second coordinate values of all points where\n ground truth is available\n res: int\n The resolution value\n offset: float, optional\n A value between 0 and 0.5 that specifies the extra interpolation to be done\n Default is 0.2\n \n Returns\n -------\n xx : {array-like, 2D}, shape (n_samples, n_samples)\n yy : {array-like, 2D}, shape (n_samples, n_samples)\n \"\"\"\n y_min = y.min() - offset\n y_max = y.max()+ offset\n x_min = x.min()-offset\n x_max = x.max()+offset\n x_arr = np.linspace(x_min,x_max,res)\n y_arr = np.linspace(y_min,y_max,res)\n xx,yy = np.meshgrid(x_arr,y_arr) \n return xx,yy\n\n \n def fit(self, X, y):\n \"\"\" The function call to fit the model on the given data. \n Parameters\n ----------\n X: {array-like, 2D matrix}, shape(n_samples, 2)\n The set of all coordinates, where we have ground truth\n values\n y: array-like, shape(n_samples,)\n The set of all the ground truth values using which\n we perform interpolation\n\n Returns\n -------\n self : object\n Returns self\n \"\"\"\n\n# if self.coordinate_type == 'latlong_small':\n# \"\"\"\n# Use the conversions and projections for small changes in LatLong\n# \"\"\"\n# print (\"To be done later\")\n# return self\n\n# if self.coordinate_type == 'latlong_large':\n# \"\"\"\n# Code to be written after understanding all the projections.\n# \"\"\"\n# print (\"To be done later\")\n# return self\n\n if self.coordinate_type==\"Euclidean\":\n \n X = deepcopy(np.c_[X,y])\n\n if self.resolution=='high':\n xx,yy = self.make_grid(X,y,1000)\n \n if self.resolution=='low':\n xx,yy = self.make_grid(X,y,10)\n \n if self.resolution=='standard':\n xx,yy = self.make_grid(X,y,100)\n\n new = []\n new_arr = deepcopy(X)\n for points in new_arr:\n min_dist = np.inf\n val = 0\n for j in range(len(yy)):\n temp = yy[j][0]\n for i in range(len(xx[0])):\n dist = np.linalg.norm(np.array([xx[0][i],temp]) - points[:2])\n if dist<min_dist:\n min_dist = dist\n val = (i,j)\n new.append((points,val))\n new_grid = np.zeros((len(xx),len(yy)))\n for i in range(len(new)):\n x = new[i][1][0]\n y = new[i][1][1]\n new_grid[x][y] = new[i][0][2]\n x_nz,y_nz = np.nonzero(new_grid)\n list_nz = []\n for i in range(len(x_nz)):\n list_nz.append((x_nz[i],y_nz[i]))\n final = np.copy(new_grid)\n for i in range(len(xx[0])):\n for j in range(len(yy)):\n normalise = 0\n if (i,j) in list_nz:\n continue\n else:\n for elem in range(len(x_nz)):\n source = np.array([x_nz[elem],y_nz[elem]])\n target = np.array([xx[0][i],yy[j][0]])\n dist = (np.abs(xx[0][source[0]] - target[0])**self.exponent + np.abs(yy[source[1]][0] - target[1])**self.exponent)**(1/self.exponent)\n final[i][j]+=new_grid[x_nz[elem],y_nz[elem]]/dist\n normalise+=1/(dist)\n final[i][j]/=normalise\n self.interpolated_values = final\n self.x_grid = xx\n self.y_grid = yy\n \n return self\n\n# def predict(self, X):\n# \"\"\" The function call to predict using the interpolated data\n# 
Parameters\n# ----------\n# X: {array-like, 2D matrix}, shape(n_samples, 2)\n# The set of all coordinates, where we have ground truth\n# values\n \n\n# Returns\n# -------\n# y: array-like, shape(n_samples,)\n# The set of all the ground truth values using which\n# we perform interpolation \n# \"\"\"\n# if self.coordinate_type == 'Euclidean':\n# for i in range(self.x_grid[0]):\n# for j in range()\n \n# else:\n# print(\"Will be done later\")\n# return \n \n \n# self.x_grid\n\n\n\na = idw()\nimport pandas as pd\ndf = pd.read_csv('../../testdata/30-03-18.csv')\ndata = np.array(df[['longitude','latitude','value']])\na.fit(data[:,:2],data[:,2])\n\n<__main__.idw at 0x7f36db6f9c88>\n\n\n\na.interpolated_values\n\narray([[171.89189189, 171.89597641, 171.90813547, ..., 173.89050472,\n 173.89261459, 173.89466512],\n [171.77142857, 171.77625338, 171.79060316, ..., 173.89585441,\n 173.89787202, 173.89983245],\n [171.63636364, 171.64211895, 171.65921778, ..., 173.9012935 ,\n 173.90321551, 173.90508269],\n ...,\n [174.49681529, 174.49676176, 174.49660126, ..., 174.24671184,\n 174.24416446, 174.24164382],\n [174.49056604, 174.49051451, 174.49035999, ..., 174.24671343,\n 174.24419773, 174.2417078 ],\n [174.48447205, 174.48442242, 174.48427358, ..., 174.2466762 ,\n 174.24419219, 174.24173298]])" + }, + { + "objectID": "index.html", + "href": "index.html", + "title": "Polire", + "section": "", + "text": "pip install polire\nThe word “interpolation” has Latin origin and is composed of two words - Inter meaning between and Polire meaning to polish.\nPolire is a collection of several spatial interpolation algorithms." + }, + { + "objectID": "index.html#polire", + "href": "index.html#polire", + "title": "Polire", + "section": "", + "text": "pip install polire\nThe word “interpolation” has Latin origin and is composed of two words - Inter meaning between and Polire meaning to polish.\nPolire is a collection of several spatial interpolation algorithms." 
+ }, { "objectID": "examples/all_in_one.html", "href": "examples/all_in_one.html", @@ -41,34 +69,6 @@ "section": "Checking performance", "text": "Checking performance\n\nmodels = {\n \"Inverse Distance Weighting\": idw_pred,\n \"Kriging (spherical)\": kriging_pred,\n \"Spline\": spline_pred,\n \"Trend\": trend_pred,\n \"Linear Regression\": lr_pred,\n f\"{K}-Nearest Neighbors\": knn_pred,\n \"Random Forest\": rf_pred,\n \"Spatial Average\": spatial_average_pred,\n}\n\nresult = pd.DataFrame(columns=[\"RMSE\", \"MAE\", \"MAPE\", \"Neg R2\"], index=models.keys())\n\nfor model_name, y_pred in models.items():\n result.loc[model_name, \"RMSE\"] = mean_squared_error(\n y_test.ravel(), y_pred.ravel(), squared=False\n )\n result.loc[model_name, \"MAE\"] = mean_absolute_error(y_test.ravel(), y_pred.ravel())\n result.loc[model_name, \"MAPE\"] = mean_absolute_percentage_error(\n y_test.ravel(), y_pred.ravel()\n )\n result.loc[model_name, \"Neg R2\"] = -r2_score(y_test.ravel(), y_pred.ravel())\n\nresult.sort_values(\"RMSE\").style.highlight_min(axis=0, color=\"green\").format(\"{:.2f}\")\n\n\n\n\n\n\n \nRMSE\nMAE\nMAPE\nNeg R2\n\n\n\n\nKriging (spherical)\n3.10\n1.95\n0.01\n-0.97\n\n\nRandom Forest\n3.96\n2.74\n0.01\n-0.96\n\n\nSpline\n4.43\n2.85\n0.01\n-0.95\n\n\n3-Nearest Neighbors\n4.60\n3.19\n0.01\n-0.94\n\n\nSpatial Average\n6.04\n4.32\n0.02\n-0.90\n\n\nInverse Distance Weighting\n6.54\n5.04\n0.02\n-0.88\n\n\nTrend\n8.02\n6.30\n0.02\n-0.82\n\n\nLinear Regression\n8.46\n6.98\n0.03\n-0.80" }, - { - "objectID": "polire/idw/tests/IDW Initial.html", - "href": "polire/idw/tests/IDW Initial.html", - "title": "Inverse Distance Weighting (IDW) Interpolation", - "section": "", - "text": "Let us suppose we have a data that shows the variation of one quantity of interest across space. This could be equivalently viewed as { (\\(\\vec{x_1}, y_1)\\),\\((\\vec{x_2}, y_2)\\),\\((\\vec{x_3}, y_3)\\), …}, where the \\(\\vec{x_i}\\)’s represent the coordinates of the points where we have data and the \\(y_i\\)’s are the actual data at those points. We would like to perform an interpolation using these data points such that a few things are satisifed. 1. The interpolation is exact - the value at the known data points is the same as the estimated value, and 2. We would want far away points from a given source data point to receive less importance than nearby points. 3. Wikipedia has an excellent article on IDW. 
I am linking it here.\nWe are using the following approximation for coordinate_type being latlong_small \\(| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}\\)\n\nimport numpy as np\nimport pandas as pd\ndf = pd.read_csv('../../testdata/30-03-18.csv')\ndata = np.array(df[['longitude','latitude','value']])\n\n\ndef make_grid(X,y,res):\n y_min = y.min()-0.2\n y_max = y.max()+0.2\n x_min = X.min()-0.2\n x_max = X.max()+0.2\n x_arr = np.linspace(x_min,x_max,res)\n y_arr = np.linspace(y_min,y_max,res)\n xx,yy = np.meshgrid(x_arr,y_arr) \n return xx,yy\n\ndef idw(dataset, exponent = 2, resolution='standard', coordinate_type='euclidean',verbose='False'):\n \"\"\"\n Here X is the set of spatial locations - Usually assumed to be Lat-Long\n To be extended to higher dimenstions y - estimated value , exponenet - how\n much weight to assign to far off locations to be estimated for each data point, \n extent - interpolate over a grid - what is xmax xmin ymax ymin\n \"\"\"\n if coordinate_type == 'latlong_small':\n \"\"\"\n Assume that the Earth is a Sphere, and use polar coordinates\n $| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}$\n \"\"\"\n return \"To be done later\"\n if coordinate_type == 'latlong_large':\n \"\"\"\n Code to be written after understanding all the projections.\n \"\"\"\n return \"To be done later\"\n if coordinate_type==\"euclidean\":\n \n# print(dataset)\n X = dataset[:,0]\n y = dataset[:,1]\n if resolution=='high':\n xx,yy = make_grid(X,y,1000)\n \n if resolution=='low':\n xx,yy = make_grid(X,y,10)\n \n if resolution=='standard':\n xx,yy = make_grid(X,y,100)\n \n new = []\n new_arr = dataset\n for points in new_arr:\n mindist = np.inf\n val = 0\n for j in range(len(yy)):\n temp = yy[j][0]\n for i in range(len(xx[0])):\n dist = np.linalg.norm(np.array([xx[0][i],temp]) - points[:2])\n if dist<mindist:\n mindist = dist\n val = (i,j)\n new.append((points,val))\n print(new)\n new_grid = np.zeros((len(xx),len(yy)))\n for i in range(len(new)):\n x = new[i][1][0]\n y = new[i][1][1]\n new_grid[x][y] = new[i][0][2]\n print(new[i])\n x_nz,y_nz = np.nonzero(new_grid)\n list_nz = []\n for i in range(len(x_nz)):\n list_nz.append((x_nz[i],y_nz[i]))\n \n final = np.copy(new_grid)\n \n for i in range(len(xx[0])):\n for j in range(len(yy)):\n normalise = 0\n if (i,j) in list_nz:\n continue\n else:\n \"\"\"\n Could potentially have a divide by zero error here\n Use a try except clause\n \"\"\"\n for elem in range(len(x_nz)):\n source = np.array([x_nz[elem],y_nz[elem]])\n target = np.array([xx[0][i],yy[j][0]])\n dist = (np.abs(xx[0][source[0]] - target[0])**exponent + np.abs(yy[source[1]][0] - target[1])**exponent)**(1/exponent)\n final[i][j]+=new_grid[x_nz[elem],y_nz[elem]]/dist\n normalise+=1/(dist)\n final[i][j]/=normalise\n \n return final\n\n\nidw(data).shape\n\n[(array([ 77.234291, 28.581197, 194. ]), (60, 39)), (array([ 77.245721, 28.739434, 267. ]), (62, 60)), (array([ 77.101961, 28.822931, 273. ]), (42, 72)), (array([ 76.991463, 28.620806, 129. ]), (27, 44)), (array([ 77.0325413, 28.60909 , 176. ]), (33, 42)), (array([ 77.072196, 28.570859, 172. ]), (38, 37)), (array([ 77.1670103, 28.5646102, 168. ]), (51, 36)), (array([ 77.1180053, 28.5627763, 105. ]), (45, 36)), (array([ 77.272404, 28.530782, 203. ]), (66, 32)), (array([ 77.26075 , 28.563827, 192. ]), (64, 36)), (array([77.0996943, 28.610304 , 95. ]), (42, 43)), (array([ 77.2273074, 28.5918245, 148. 
]), (59, 40)), (array([ 77.09211 , 28.732219, 203. ]), (41, 59)), (array([ 77.317084, 28.668672, 221. ]), (72, 51)), (array([ 77.1585447, 28.6573814, 141. ]), (50, 49)), (array([ 77.2011573, 28.6802747, 192. ]), (56, 52)), (array([ 77.237372, 28.612561, 203. ]), (61, 43)), (array([ 77.305651, 28.632707, 152. ]), (70, 46)), (array([ 77.1473105, 28.6514781, 185. ]), (49, 48)), (array([ 77.16482 , 28.699254, 290. ]), (51, 55)), (array([ 77.170221, 28.728722, 273. ]), (52, 59)), (array([ 77.2005604, 28.6372688, 173. ]), (56, 46)), (array([ 77.2011573, 28.7256504, 269. ]), (56, 58)), (array([ 77.136777, 28.669119, 160. ]), (47, 51)), (array([77.267246, 28.49968 , 78. ]), (65, 27)), (array([ 77.2494387, 28.6316945, 211. ]), (62, 45)), (array([ 77.2735737, 28.5512005, 252. ]), (66, 34)), (array([ 77.2159377, 28.5504249, 133. ]), (58, 34)), (array([77.1112615, 28.7500499, 77. ]), (44, 62)), (array([77.22445, 28.63576, 96. ]), (59, 46))]\n(array([ 77.234291, 28.581197, 194. ]), (60, 39))\n(array([ 77.245721, 28.739434, 267. ]), (62, 60))\n(array([ 77.101961, 28.822931, 273. ]), (42, 72))\n(array([ 76.991463, 28.620806, 129. ]), (27, 44))\n(array([ 77.0325413, 28.60909 , 176. ]), (33, 42))\n(array([ 77.072196, 28.570859, 172. ]), (38, 37))\n(array([ 77.1670103, 28.5646102, 168. ]), (51, 36))\n(array([ 77.1180053, 28.5627763, 105. ]), (45, 36))\n(array([ 77.272404, 28.530782, 203. ]), (66, 32))\n(array([ 77.26075 , 28.563827, 192. ]), (64, 36))\n(array([77.0996943, 28.610304 , 95. ]), (42, 43))\n(array([ 77.2273074, 28.5918245, 148. ]), (59, 40))\n(array([ 77.09211 , 28.732219, 203. ]), (41, 59))\n(array([ 77.317084, 28.668672, 221. ]), (72, 51))\n(array([ 77.1585447, 28.6573814, 141. ]), (50, 49))\n(array([ 77.2011573, 28.6802747, 192. ]), (56, 52))\n(array([ 77.237372, 28.612561, 203. ]), (61, 43))\n(array([ 77.305651, 28.632707, 152. ]), (70, 46))\n(array([ 77.1473105, 28.6514781, 185. ]), (49, 48))\n(array([ 77.16482 , 28.699254, 290. ]), (51, 55))\n(array([ 77.170221, 28.728722, 273. ]), (52, 59))\n(array([ 77.2005604, 28.6372688, 173. ]), (56, 46))\n(array([ 77.2011573, 28.7256504, 269. ]), (56, 58))\n(array([ 77.136777, 28.669119, 160. ]), (47, 51))\n(array([77.267246, 28.49968 , 78. ]), (65, 27))\n(array([ 77.2494387, 28.6316945, 211. ]), (62, 45))\n(array([ 77.2735737, 28.5512005, 252. ]), (66, 34))\n(array([ 77.2159377, 28.5504249, 133. ]), (58, 34))\n(array([77.1112615, 28.7500499, 77. ]), (44, 62))\n(array([77.22445, 28.63576, 96. 
]), (59, 46))\n\n\n(100, 100)\n\n\n\ntemp = data[10]\n\n\nnp.where(data==temp)\n\n(array([10, 10, 10]), array([0, 1, 2]))\n\n\n\nresult = np.nonzero(data==temp)\n\n\nnp.unique(result[0])[0]\n\n10\n\n\n\nlistOfCoordinates= list(zip(result[0], result[1]))\n\n\nlistOfCoordinates\n\n[(10, 0), (10, 1), (10, 2)]" - }, - { - "objectID": "polire/gp/tests/GP interpolation.html", - "href": "polire/gp/tests/GP interpolation.html", - "title": "Polire", - "section": "", - "text": "from pykrige import OrdinaryKriging\n\n\nimport pandas as pd\nimport numpy as np\n\n\nok = OrdinaryKriging(data[:,0],data[:,1],data[:,2])\nok.ex\n\n\na,b = ok.execute('grid',x[0],y[:,0])\n\n\nfrom pykrige import OrdinaryKriging\nimport pandas as pd\nimport numpy as np\n\ndef ordinary_kriging(dataset, resolution='standard', coordinate_type='euclidean',verbose='False',method='grid', isvariance = False):\n if coordinate_type == 'latlong_small':\n \"\"\"\n Assume that the Earth is a Sphere, and use polar coordinates\n $| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}$\n \"\"\"\n return \"To be done later\"\n if coordinate_type == 'latlong_large':\n \"\"\"\n Code to be written after understanding all the projections.\n \"\"\"\n return \"To be done later\"\n if coordinate_type==\"euclidean\":\n \n ok = OrdinaryKriging(dataset[:,0],dataset[:,1],dataset[:,2])\n X = dataset[:,0]\n y = dataset[:,1]\n \n if resolution=='high':\n xx,yy = make_grid(X,y,1000)\n \n elif resolution=='low':\n xx,yy = make_grid(X,y,10)\n \n elif resolution=='standard':\n xx,yy = make_grid(X,y,100)\n \n else:\n print('Value Error - Resolution can only be one of \\nhigh, low or standard')\n \n values, variances = ok.execute(method, xx[0], yy[:,0])\n \n if isvariance:\n return values, variances\n else:\n del variances\n return np.array(values)\n\n\nordinary_kriging(data)\n\narray([[129.94984945, 129.7682324 , 129.58820662, ..., 159.34079485,\n 159.99175016, 160.63241067],\n [130.22090025, 130.03615966, 129.8529146 , ..., 159.9575165 ,\n 160.61228126, 161.25625641],\n [130.50105231, 130.31324536, 130.12683652, ..., 160.59265384,\n 161.25084023, 161.8977369 ],\n ...,\n [207.22133238, 207.82739139, 208.44615116, ..., 248.64646661,\n 248.3790241 , 248.11033441],\n [207.92838926, 208.53490708, 209.15376273, ..., 248.91678379,\n 248.65601627, 248.39371596],\n [208.61942088, 209.22595474, 209.84445913, ..., 249.17442481,\n 248.9203453 , 248.66446245]])\n\n\n\nWhat does ok(‘points’) really do?\nSpecifically test when points aren’t really passed - they are let’s say the point of an array\nReturns the diagonal matrix of all these coordinates\n\n\nordinary_kriging(data,method='points')\n\narray([129.94984945, 130.03615966, 130.12683652, 130.22219703,\n 130.32258826, 130.42839089, 130.54002324, 130.65794596,\n 130.7826674 , 130.91474976, 131.05481629, 131.20355964,\n 131.36175158, 131.53025441, 131.71003442, 131.90217771,\n 132.107909 , 132.32861401, 132.56586607, 132.82145795,\n 133.0974399 , 133.39616477, 133.72034153, 134.07309736,\n 134.45804822, 134.87937482, 135.34189663, 135.85112772,\n 136.41328222, 137.03517039, 137.72388496, 138.48612122,\n 139.326921 , 140.24763047, 141.24300526, 142.29757046,\n 143.37881815, 144.38425962, 144.49187978, 143.1202101 ,\n 141.66667134, 140.45686022, 139.66795657, 142.48270308,\n 147.03665055, 151.8487008 , 156.90272514, 162.25791164,\n 168.04938768, 173.63870768, 180.93567147, 190.3440156 ,\n 199.86834472, 208.48375248, 215.75635742, 222.1915652 ,\n 228.08641413, 233.15249702, 
236.89713686, 239.83524192,\n 242.45744315, 244.57483343, 245.52139699, 245.88236757,\n 246.12295211, 246.3306567 , 246.52369882, 246.70598807,\n 246.87792737, 247.03919426, 247.18952217, 247.3288843 ,\n 247.45749059, 247.57573348, 247.68412862, 247.78326467,\n 247.87376505, 247.95626051, 248.03137024, 248.09968963,\n 248.16178271, 248.21817801, 248.26936683, 248.31580309,\n 248.35790422, 248.39605277, 248.43059841, 248.46186013,\n 248.49012851, 248.51566797, 248.53871897, 248.55950011,\n 248.57821004, 248.59502931, 248.61012204, 248.62363741,\n 248.63571111, 248.64646661, 248.65601627, 248.66446245])\n\n\n\ndef make_grid(X,y,res):\n y_min = y.min()-0.2\n y_max = y.max()+0.2\n x_min = X.min()-0.2\n x_max = X.max()+0.2\n x_arr = np.linspace(x_min,x_max,res)\n y_arr = np.linspace(y_min,y_max,res)\n xx,yy = np.meshgrid(x_arr,y_arr) \n return xx,yy\nx, y = make_grid(data[:,0],data[:,1],100)" - }, - { - "objectID": "index.html", - "href": "index.html", - "title": "Polire", - "section": "", - "text": "pip install polire\nThe word “interpolation” has Latin origin and is composed of two words - Inter meaning between and Polire meaning to polish.\nPolire is a collection of several spatial interpolation algorithms." - }, - { - "objectID": "index.html#polire", - "href": "index.html#polire", - "title": "Polire", - "section": "", - "text": "pip install polire\nThe word “interpolation” has Latin origin and is composed of two words - Inter meaning between and Polire meaning to polish.\nPolire is a collection of several spatial interpolation algorithms." - }, { "objectID": "polire/kriging/tests/Kriging Interpolation.html", "href": "polire/kriging/tests/Kriging Interpolation.html", @@ -77,10 +77,10 @@ "text": "from pykrige import OrdinaryKriging\n\n\nimport pandas as pd\nimport numpy as np\n\n\nok = OrdinaryKriging(data[:,0],data[:,1],data[:,2])\nok.ex\n\n\na,b = ok.execute('grid',x[0],y[:,0])\n\n\nfrom pykrige import OrdinaryKriging\nimport pandas as pd\nimport numpy as np\n\ndef ordinary_kriging(dataset, resolution='standard', coordinate_type='euclidean',verbose='False',method='grid', isvariance = False):\n if coordinate_type == 'latlong_small':\n \"\"\"\n Assume that the Earth is a Sphere, and use polar coordinates\n $| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}$\n \"\"\"\n return \"To be done later\"\n if coordinate_type == 'latlong_large':\n \"\"\"\n Code to be written after understanding all the projections.\n \"\"\"\n return \"To be done later\"\n if coordinate_type==\"euclidean\":\n \n ok = OrdinaryKriging(dataset[:,0],dataset[:,1],dataset[:,2])\n X = dataset[:,0]\n y = dataset[:,1]\n \n if resolution=='high':\n xx,yy = make_grid(X,y,1000)\n \n elif resolution=='low':\n xx,yy = make_grid(X,y,10)\n \n elif resolution=='standard':\n xx,yy = make_grid(X,y,100)\n \n else:\n print('Value Error - Resolution can only be one of \\nhigh, low or standard')\n \n values, variances = ok.execute(method, xx[0], yy[:,0])\n \n if isvariance:\n return values, variances\n else:\n del variances\n return np.array(values)\n\n\nordinary_kriging(data)\n\narray([[129.94984945, 129.7682324 , 129.58820662, ..., 159.34079485,\n 159.99175016, 160.63241067],\n [130.22090025, 130.03615966, 129.8529146 , ..., 159.9575165 ,\n 160.61228126, 161.25625641],\n [130.50105231, 130.31324536, 130.12683652, ..., 160.59265384,\n 161.25084023, 161.8977369 ],\n ...,\n [207.22133238, 207.82739139, 208.44615116, ..., 248.64646661,\n 248.3790241 , 248.11033441],\n [207.92838926, 
208.53490708, 209.15376273, ..., 248.91678379,\n 248.65601627, 248.39371596],\n [208.61942088, 209.22595474, 209.84445913, ..., 249.17442481,\n 248.9203453 , 248.66446245]])\n\n\n\nWhat does ok(‘points’) really do?\nSpecifically test when points aren’t really passed - they are let’s say the point of an array\nReturns the diagonal matrix of all these coordinates\n\n\nordinary_kriging(data,method='points')\n\narray([129.94984945, 130.03615966, 130.12683652, 130.22219703,\n 130.32258826, 130.42839089, 130.54002324, 130.65794596,\n 130.7826674 , 130.91474976, 131.05481629, 131.20355964,\n 131.36175158, 131.53025441, 131.71003442, 131.90217771,\n 132.107909 , 132.32861401, 132.56586607, 132.82145795,\n 133.0974399 , 133.39616477, 133.72034153, 134.07309736,\n 134.45804822, 134.87937482, 135.34189663, 135.85112772,\n 136.41328222, 137.03517039, 137.72388496, 138.48612122,\n 139.326921 , 140.24763047, 141.24300526, 142.29757046,\n 143.37881815, 144.38425962, 144.49187978, 143.1202101 ,\n 141.66667134, 140.45686022, 139.66795657, 142.48270308,\n 147.03665055, 151.8487008 , 156.90272514, 162.25791164,\n 168.04938768, 173.63870768, 180.93567147, 190.3440156 ,\n 199.86834472, 208.48375248, 215.75635742, 222.1915652 ,\n 228.08641413, 233.15249702, 236.89713686, 239.83524192,\n 242.45744315, 244.57483343, 245.52139699, 245.88236757,\n 246.12295211, 246.3306567 , 246.52369882, 246.70598807,\n 246.87792737, 247.03919426, 247.18952217, 247.3288843 ,\n 247.45749059, 247.57573348, 247.68412862, 247.78326467,\n 247.87376505, 247.95626051, 248.03137024, 248.09968963,\n 248.16178271, 248.21817801, 248.26936683, 248.31580309,\n 248.35790422, 248.39605277, 248.43059841, 248.46186013,\n 248.49012851, 248.51566797, 248.53871897, 248.55950011,\n 248.57821004, 248.59502931, 248.61012204, 248.62363741,\n 248.63571111, 248.64646661, 248.65601627, 248.66446245])\n\n\n\ndef make_grid(X,y,res):\n y_min = y.min()-0.2\n y_max = y.max()+0.2\n x_min = X.min()-0.2\n x_max = X.max()+0.2\n x_arr = np.linspace(x_min,x_max,res)\n y_arr = np.linspace(y_min,y_max,res)\n xx,yy = np.meshgrid(x_arr,y_arr) \n return xx,yy\nx, y = make_grid(data[:,0],data[:,1],100)" }, { - "objectID": "polire/idw/tests/Numpy+IDWTest.html", - "href": "polire/idw/tests/Numpy+IDWTest.html", - "title": "Polire", + "objectID": "polire/idw/tests/IDW Initial.html", + "href": "polire/idw/tests/IDW Initial.html", + "title": "Inverse Distance Weighting (IDW) Interpolation", "section": "", - "text": "import numpy as np\n\n\na = np.array([[1,2,3],[4,5,6]])\n\n\na\n\narray([[1, 2, 3],\n [4, 5, 6]])\n\n\n\nb = np.array([[2,3,4],[5,6,9]])\n\n\nb\n\narray([[2, 3, 4],\n [5, 6, 9]])\n\n\n\na\n\narray([[1, 2, 3],\n [4, 5, 6]])\n\n\n\na - b\n\narray([[-1, -1, -1],\n [-1, -1, -3]])\n\n\n\nnp.argmin([np.linalg.norm(a[i] - b[i]) for i in range(len(a))])\n\n1.7320508075688772\n\n\n\nnp.min?\n\n\n\n\"\"\"\nThis is a module for IDW Spatial Interpolation\n\"\"\"\nimport numpy as np\nimport pandas as pd\nfrom copy import deepcopy\nclass idw():\n \"\"\" A class that is declared for performing IDW Interpolation.\n For more information on how this method works, kindly refer to\n https://en.wikipedia.org/wiki/Inverse_distance_weighting\n\n Parameters\n ----------\n exponent : positive float, optional\n The rate of fall of values from source data points.\n Higher the exponent, lower is the value when we move\n across space. Default value is 2.\n resolution: str, optional\n Decides the smoothness of the interpolation. Note that\n interpolation is done over a grid. 
Higher the resolution\n means more grid cells and more time for interpolation.\n Default value is 'standard'\n coordinate_type: str, optional\n Decides the distance metric to be used, while performing\n interpolation. Euclidean by default. \n \"\"\"\n def __init__(self, exponent = 2, resolution = 'standard', coordinate_type='Euclidean'):\n \n self.exponent = exponent\n self.resolution = resolution\n self.coordinate_type = coordinate_type\n self.interpolated_values = None\n self.x_grid = None\n self.y_grid = None\n\n def make_grid(self, x, y, res, offset=0.2):\n\n \"\"\" This function returns the grid to perform interpolation on.\n This function is used inside the fit() attribute of the idw class.\n \n Parameters\n ----------\n x: array-like, shape(n_samples,)\n The first coordinate values of all points where\n ground truth is available\n y: array-like, shape(n_samples,)\n The second coordinate values of all points where\n ground truth is available\n res: int\n The resolution value\n offset: float, optional\n A value between 0 and 0.5 that specifies the extra interpolation to be done\n Default is 0.2\n \n Returns\n -------\n xx : {array-like, 2D}, shape (n_samples, n_samples)\n yy : {array-like, 2D}, shape (n_samples, n_samples)\n \"\"\"\n y_min = y.min() - offset\n y_max = y.max()+ offset\n x_min = x.min()-offset\n x_max = x.max()+offset\n x_arr = np.linspace(x_min,x_max,res)\n y_arr = np.linspace(y_min,y_max,res)\n xx,yy = np.meshgrid(x_arr,y_arr) \n return xx,yy\n\n \n def fit(self, X, y):\n \"\"\" The function call to fit the model on the given data. \n Parameters\n ----------\n X: {array-like, 2D matrix}, shape(n_samples, 2)\n The set of all coordinates, where we have ground truth\n values\n y: array-like, shape(n_samples,)\n The set of all the ground truth values using which\n we perform interpolation\n\n Returns\n -------\n self : object\n Returns self\n \"\"\"\n\n# if self.coordinate_type == 'latlong_small':\n# \"\"\"\n# Use the conversions and projections for small changes in LatLong\n# \"\"\"\n# print (\"To be done later\")\n# return self\n\n# if self.coordinate_type == 'latlong_large':\n# \"\"\"\n# Code to be written after understanding all the projections.\n# \"\"\"\n# print (\"To be done later\")\n# return self\n\n if self.coordinate_type==\"Euclidean\":\n \n X = deepcopy(np.c_[X,y])\n\n if self.resolution=='high':\n xx,yy = self.make_grid(X,y,1000)\n \n if self.resolution=='low':\n xx,yy = self.make_grid(X,y,10)\n \n if self.resolution=='standard':\n xx,yy = self.make_grid(X,y,100)\n\n new = []\n new_arr = deepcopy(X)\n for points in new_arr:\n min_dist = np.inf\n val = 0\n for j in range(len(yy)):\n temp = yy[j][0]\n for i in range(len(xx[0])):\n dist = np.linalg.norm(np.array([xx[0][i],temp]) - points[:2])\n if dist<min_dist:\n min_dist = dist\n val = (i,j)\n new.append((points,val))\n new_grid = np.zeros((len(xx),len(yy)))\n for i in range(len(new)):\n x = new[i][1][0]\n y = new[i][1][1]\n new_grid[x][y] = new[i][0][2]\n x_nz,y_nz = np.nonzero(new_grid)\n list_nz = []\n for i in range(len(x_nz)):\n list_nz.append((x_nz[i],y_nz[i]))\n final = np.copy(new_grid)\n for i in range(len(xx[0])):\n for j in range(len(yy)):\n normalise = 0\n if (i,j) in list_nz:\n continue\n else:\n for elem in range(len(x_nz)):\n source = np.array([x_nz[elem],y_nz[elem]])\n target = np.array([xx[0][i],yy[j][0]])\n dist = (np.abs(xx[0][source[0]] - target[0])**self.exponent + np.abs(yy[source[1]][0] - target[1])**self.exponent)**(1/self.exponent)\n final[i][j]+=new_grid[x_nz[elem],y_nz[elem]]/dist\n 
normalise+=1/(dist)\n final[i][j]/=normalise\n self.interpolated_values = final\n self.x_grid = xx\n self.y_grid = yy\n \n return self\n\n# def predict(self, X):\n# \"\"\" The function call to predict using the interpolated data\n# Parameters\n# ----------\n# X: {array-like, 2D matrix}, shape(n_samples, 2)\n# The set of all coordinates, where we have ground truth\n# values\n \n\n# Returns\n# -------\n# y: array-like, shape(n_samples,)\n# The set of all the ground truth values using which\n# we perform interpolation \n# \"\"\"\n# if self.coordinate_type == 'Euclidean':\n# for i in range(self.x_grid[0]):\n# for j in range()\n \n# else:\n# print(\"Will be done later\")\n# return \n \n \n# self.x_grid\n\n\n\na = idw()\nimport pandas as pd\ndf = pd.read_csv('../../testdata/30-03-18.csv')\ndata = np.array(df[['longitude','latitude','value']])\na.fit(data[:,:2],data[:,2])\n\n<__main__.idw at 0x7f36db6f9c88>\n\n\n\na.interpolated_values\n\narray([[171.89189189, 171.89597641, 171.90813547, ..., 173.89050472,\n 173.89261459, 173.89466512],\n [171.77142857, 171.77625338, 171.79060316, ..., 173.89585441,\n 173.89787202, 173.89983245],\n [171.63636364, 171.64211895, 171.65921778, ..., 173.9012935 ,\n 173.90321551, 173.90508269],\n ...,\n [174.49681529, 174.49676176, 174.49660126, ..., 174.24671184,\n 174.24416446, 174.24164382],\n [174.49056604, 174.49051451, 174.49035999, ..., 174.24671343,\n 174.24419773, 174.2417078 ],\n [174.48447205, 174.48442242, 174.48427358, ..., 174.2466762 ,\n 174.24419219, 174.24173298]])" + "text": "Let us suppose we have a data that shows the variation of one quantity of interest across space. This could be equivalently viewed as { (\\(\\vec{x_1}, y_1)\\),\\((\\vec{x_2}, y_2)\\),\\((\\vec{x_3}, y_3)\\), …}, where the \\(\\vec{x_i}\\)’s represent the coordinates of the points where we have data and the \\(y_i\\)’s are the actual data at those points. We would like to perform an interpolation using these data points such that a few things are satisifed. 1. The interpolation is exact - the value at the known data points is the same as the estimated value, and 2. We would want far away points from a given source data point to receive less importance than nearby points. 3. Wikipedia has an excellent article on IDW. 
I am linking it here.\nWe are using the following approximation for coordinate_type being latlong_small \\(| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}\\)\n\nimport numpy as np\nimport pandas as pd\ndf = pd.read_csv('../../testdata/30-03-18.csv')\ndata = np.array(df[['longitude','latitude','value']])\n\n\ndef make_grid(X,y,res):\n y_min = y.min()-0.2\n y_max = y.max()+0.2\n x_min = X.min()-0.2\n x_max = X.max()+0.2\n x_arr = np.linspace(x_min,x_max,res)\n y_arr = np.linspace(y_min,y_max,res)\n xx,yy = np.meshgrid(x_arr,y_arr) \n return xx,yy\n\ndef idw(dataset, exponent = 2, resolution='standard', coordinate_type='euclidean',verbose='False'):\n \"\"\"\n Here X is the set of spatial locations - Usually assumed to be Lat-Long\n To be extended to higher dimenstions y - estimated value , exponenet - how\n much weight to assign to far off locations to be estimated for each data point, \n extent - interpolate over a grid - what is xmax xmin ymax ymin\n \"\"\"\n if coordinate_type == 'latlong_small':\n \"\"\"\n Assume that the Earth is a Sphere, and use polar coordinates\n $| \\vec{r_2}− \\vec{r_1}| ≈ \\text{R }\\times \\sqrt[]{(Lat_2 - Lat_1)^{2} + (Long_2 - Long_1)^{2}}$\n \"\"\"\n return \"To be done later\"\n if coordinate_type == 'latlong_large':\n \"\"\"\n Code to be written after understanding all the projections.\n \"\"\"\n return \"To be done later\"\n if coordinate_type==\"euclidean\":\n \n# print(dataset)\n X = dataset[:,0]\n y = dataset[:,1]\n if resolution=='high':\n xx,yy = make_grid(X,y,1000)\n \n if resolution=='low':\n xx,yy = make_grid(X,y,10)\n \n if resolution=='standard':\n xx,yy = make_grid(X,y,100)\n \n new = []\n new_arr = dataset\n for points in new_arr:\n mindist = np.inf\n val = 0\n for j in range(len(yy)):\n temp = yy[j][0]\n for i in range(len(xx[0])):\n dist = np.linalg.norm(np.array([xx[0][i],temp]) - points[:2])\n if dist<mindist:\n mindist = dist\n val = (i,j)\n new.append((points,val))\n print(new)\n new_grid = np.zeros((len(xx),len(yy)))\n for i in range(len(new)):\n x = new[i][1][0]\n y = new[i][1][1]\n new_grid[x][y] = new[i][0][2]\n print(new[i])\n x_nz,y_nz = np.nonzero(new_grid)\n list_nz = []\n for i in range(len(x_nz)):\n list_nz.append((x_nz[i],y_nz[i]))\n \n final = np.copy(new_grid)\n \n for i in range(len(xx[0])):\n for j in range(len(yy)):\n normalise = 0\n if (i,j) in list_nz:\n continue\n else:\n \"\"\"\n Could potentially have a divide by zero error here\n Use a try except clause\n \"\"\"\n for elem in range(len(x_nz)):\n source = np.array([x_nz[elem],y_nz[elem]])\n target = np.array([xx[0][i],yy[j][0]])\n dist = (np.abs(xx[0][source[0]] - target[0])**exponent + np.abs(yy[source[1]][0] - target[1])**exponent)**(1/exponent)\n final[i][j]+=new_grid[x_nz[elem],y_nz[elem]]/dist\n normalise+=1/(dist)\n final[i][j]/=normalise\n \n return final\n\n\nidw(data).shape\n\n[(array([ 77.234291, 28.581197, 194. ]), (60, 39)), (array([ 77.245721, 28.739434, 267. ]), (62, 60)), (array([ 77.101961, 28.822931, 273. ]), (42, 72)), (array([ 76.991463, 28.620806, 129. ]), (27, 44)), (array([ 77.0325413, 28.60909 , 176. ]), (33, 42)), (array([ 77.072196, 28.570859, 172. ]), (38, 37)), (array([ 77.1670103, 28.5646102, 168. ]), (51, 36)), (array([ 77.1180053, 28.5627763, 105. ]), (45, 36)), (array([ 77.272404, 28.530782, 203. ]), (66, 32)), (array([ 77.26075 , 28.563827, 192. ]), (64, 36)), (array([77.0996943, 28.610304 , 95. ]), (42, 43)), (array([ 77.2273074, 28.5918245, 148. 
]), (59, 40)), (array([ 77.09211 , 28.732219, 203. ]), (41, 59)), (array([ 77.317084, 28.668672, 221. ]), (72, 51)), (array([ 77.1585447, 28.6573814, 141. ]), (50, 49)), (array([ 77.2011573, 28.6802747, 192. ]), (56, 52)), (array([ 77.237372, 28.612561, 203. ]), (61, 43)), (array([ 77.305651, 28.632707, 152. ]), (70, 46)), (array([ 77.1473105, 28.6514781, 185. ]), (49, 48)), (array([ 77.16482 , 28.699254, 290. ]), (51, 55)), (array([ 77.170221, 28.728722, 273. ]), (52, 59)), (array([ 77.2005604, 28.6372688, 173. ]), (56, 46)), (array([ 77.2011573, 28.7256504, 269. ]), (56, 58)), (array([ 77.136777, 28.669119, 160. ]), (47, 51)), (array([77.267246, 28.49968 , 78. ]), (65, 27)), (array([ 77.2494387, 28.6316945, 211. ]), (62, 45)), (array([ 77.2735737, 28.5512005, 252. ]), (66, 34)), (array([ 77.2159377, 28.5504249, 133. ]), (58, 34)), (array([77.1112615, 28.7500499, 77. ]), (44, 62)), (array([77.22445, 28.63576, 96. ]), (59, 46))]\n(array([ 77.234291, 28.581197, 194. ]), (60, 39))\n(array([ 77.245721, 28.739434, 267. ]), (62, 60))\n(array([ 77.101961, 28.822931, 273. ]), (42, 72))\n(array([ 76.991463, 28.620806, 129. ]), (27, 44))\n(array([ 77.0325413, 28.60909 , 176. ]), (33, 42))\n(array([ 77.072196, 28.570859, 172. ]), (38, 37))\n(array([ 77.1670103, 28.5646102, 168. ]), (51, 36))\n(array([ 77.1180053, 28.5627763, 105. ]), (45, 36))\n(array([ 77.272404, 28.530782, 203. ]), (66, 32))\n(array([ 77.26075 , 28.563827, 192. ]), (64, 36))\n(array([77.0996943, 28.610304 , 95. ]), (42, 43))\n(array([ 77.2273074, 28.5918245, 148. ]), (59, 40))\n(array([ 77.09211 , 28.732219, 203. ]), (41, 59))\n(array([ 77.317084, 28.668672, 221. ]), (72, 51))\n(array([ 77.1585447, 28.6573814, 141. ]), (50, 49))\n(array([ 77.2011573, 28.6802747, 192. ]), (56, 52))\n(array([ 77.237372, 28.612561, 203. ]), (61, 43))\n(array([ 77.305651, 28.632707, 152. ]), (70, 46))\n(array([ 77.1473105, 28.6514781, 185. ]), (49, 48))\n(array([ 77.16482 , 28.699254, 290. ]), (51, 55))\n(array([ 77.170221, 28.728722, 273. ]), (52, 59))\n(array([ 77.2005604, 28.6372688, 173. ]), (56, 46))\n(array([ 77.2011573, 28.7256504, 269. ]), (56, 58))\n(array([ 77.136777, 28.669119, 160. ]), (47, 51))\n(array([77.267246, 28.49968 , 78. ]), (65, 27))\n(array([ 77.2494387, 28.6316945, 211. ]), (62, 45))\n(array([ 77.2735737, 28.5512005, 252. ]), (66, 34))\n(array([ 77.2159377, 28.5504249, 133. ]), (58, 34))\n(array([77.1112615, 28.7500499, 77. ]), (44, 62))\n(array([77.22445, 28.63576, 96. 
]), (59, 46))\n\n\n(100, 100)\n\n\n\ntemp = data[10]\n\n\nnp.where(data==temp)\n\n(array([10, 10, 10]), array([0, 1, 2]))\n\n\n\nresult = np.nonzero(data==temp)\n\n\nnp.unique(result[0])[0]\n\n10\n\n\n\nlistOfCoordinates= list(zip(result[0], result[1]))\n\n\nlistOfCoordinates\n\n[(10, 0), (10, 1), (10, 2)]" } ] \ No newline at end of file
diff --git a/sitemap.xml b/sitemap.xml
index df06753..4a106fa 100644
--- a/sitemap.xml
+++ b/sitemap.xml
@@ -1,27 +1,27 @@
- https://github.com/sustainability-lab/polire/examples/all_in_one.html
- 2023-08-24T06:36:44.052Z
+ https://github.com/sustainability-lab/polire/polire/gp/tests/GP interpolation.html
+ 2023-10-28T00:18:35.498Z
- https://github.com/sustainability-lab/polire/polire/idw/tests/IDW Initial.html
- 2023-08-24T06:36:43.036Z
+ https://github.com/sustainability-lab/polire/polire/idw/tests/Numpy+IDWTest.html
+ 2023-10-28T00:18:34.762Z
- https://github.com/sustainability-lab/polire/polire/gp/tests/GP interpolation.html
- 2023-08-24T06:36:42.204Z
+ https://github.com/sustainability-lab/polire/index.html
+ 2023-10-28T00:18:34.038Z
- https://github.com/sustainability-lab/polire/index.html
- 2023-08-24T06:36:41.416Z
+ https://github.com/sustainability-lab/polire/examples/all_in_one.html
+ 2023-10-28T00:18:33.390Z
 https://github.com/sustainability-lab/polire/polire/kriging/tests/Kriging Interpolation.html
- 2023-08-24T06:36:42.600Z
+ 2023-10-28T00:18:34.378Z
- https://github.com/sustainability-lab/polire/polire/idw/tests/Numpy+IDWTest.html
- 2023-08-24T06:36:43.468Z