diff --git a/README.md b/README.md index 841f8a65..002c5b7c 100644 --- a/README.md +++ b/README.md @@ -69,7 +69,6 @@ First, copy `.env.sample` file to `.env`, and ensure the configuration values ar |`REACT_APP_TILER_ROOT`| Optional | The root URL for the data tiler API, if not hosted from the domain of the STAC API. |`REACT_APP_IMAGE_API_ROOT`| PC APIs pcfunc endpoint | The root URL for the image data API for animations. |`REACT_APP_AZMAPS_CLIENT_ID`| Retrieve from Azure Portal | The Client ID used to authenticate against Azure Maps. -|`REACT_APP_HUB_URL`| Optional. URL to root Hub instance | Used to enable a request to launch the Hub with a specific git hosted file. |`REACT_APP_ONEDS_TENANT_KEY`| Lookup at | Telemetry key (not needed for dev) |`REACT_APP_AUTH_URL`| Optional. URL to root pc-session-api instance | Used to enable login work. diff --git a/cypress/e2e/explorer/selector.cy.js b/cypress/e2e/explorer/selector.cy.js index 69f4e549..85398142 100644 --- a/cypress/e2e/explorer/selector.cy.js +++ b/cypress/e2e/explorer/selector.cy.js @@ -74,7 +74,6 @@ describe("Explorer selector tests", () => { cy.getBySel("item-snippet-button").click(); cy.contains("import planetary_computer"); cy.contains("Copy"); - cy.contains("Open Hub").focus().type("{esc}"); // Typing escape has closed the dialog cy.contains("import planetary_computer").should("not.exist"); diff --git a/docs/concepts/computing.md b/docs/concepts/computing.md index 7be0b1cd..b5595852 100644 --- a/docs/concepts/computing.md +++ b/docs/concepts/computing.md @@ -1,50 +1,14 @@ -# Computing on the Planetary Computer +# Computing with the Planetary Computer The core components of the Planetary Computer are the datasets and APIs for querying them. This document provides an overview of the various ways you can compute on data hosted by the Planetary Computer. Regardless of how you compute on the data, to ensure maximum efficiency you should locate your compute as close to the data as possible. 
Most of the Planetary Computer Data Catalog is hosted in Azure's **West Europe** region, so your compute should be there too. -## Use our JupyterHub - -The [Planetary Computer Hub](https://planetarycomputer.microsoft.com/compute) is a [JupyterHub](https://jupyterhub.readthedocs.io/en/stable/) deployment in the West Europe Azure region. This is the easiest way to get started with computing on the Planetary Computer. -That said, the Planetary Computer Hub is focused mainly on convenience. We recommend it for prototypes and exploration, but production workloads should use one of options using your own compute detailed below. - - -```{note} You'll need to [request access](https://planetarycomputer.microsoft.com/account/request) to use the Planetary Computer Hub. -``` - -Once approved, you can log into the JupyterHub with your credentials. You'll get a computing environment that includes standard scientific and geospatial packages from one of the [Pangeo Docker Images](https://github.com/pangeo-data/pangeo-docker-images#pangeo-docker-images). - -For scalable computation, the JupyterHub is also configured with [Dask Gateway](https://gateway.dask.org/). To create a Dask Cluster: - -```python ->>> from dask_gateway import GatewayCluster - ->>> cluster = GatewayCluster() # Creates the Dask Scheduler. Might take a minute. ->>> client = cluster.get_client() ->>> cluster.adapt(minimum=1, maximum=100) ->>> cluster -GatewayCluster -``` - -With this setup, all of the computation happens on Azure, whether on a single node or on a cluster with Dask. - -![Diagram showing compute components within Azure](images/jupyterhub-diagram.png) - -See [Scaling with Dask](../quickstarts/scale-with-dask.md) for an introduction to Dask. This setup was pioneered by the [Pangeo Community](https://pangeo.io/). The [Pangeo Cloud](https://pangeo.io/cloud.html) documention provides additional background on how to use Dask-enabled JupyterHubs. 
- -## Use VS Code to connect to a remote Jupyter Kernel - -See [Using VS Code](../overview/ui-vscode) for how to use Visual Studio Code as a user interface for the Planetary Computer's Compute. - ## Use your own compute -The previous methods relied on compute provided by the Planetary Computer, which is a great way to get started with the Planetary Computer's APIs and Data. For production workloads, we recommend deploying your own compute, which gives you more control over the hardware and software environment. -### Using GitHub Codespaces - -See [Use GitHub Codespaces](../overview/ui-codespaces) for how to use [GitHub Codespaces][codespaces] as a user interface and execution environment using data from the on the Planetary Computer catalog. +See [Scaling with Dask](../quickstarts/scale-with-dask.md) for an introduction to Dask. This setup was pioneered by the [Pangeo Community](https://pangeo.io/). The [Pangeo Cloud](https://pangeo.io/cloud.html) documentation provides additional background on how to use Dask-enabled JupyterHubs. ### Using Azure Machine Learning @@ -92,5 +56,3 @@ and connect to it Like the previous setup, the Dask scheduler and workers are running in Azure near the data. The local client might be outside of Azure. ![Diagram showing compute with self-managed Dask cluster](images/cloudprovider-diagram.png) - -[codespaces]: https://github.com/features/codespaces diff --git a/docs/concepts/hub-deployment.md b/docs/concepts/hub-deployment.md index d96f13ce..407f0029 100644 --- a/docs/concepts/hub-deployment.md +++ b/docs/concepts/hub-deployment.md @@ -1,14 +1,16 @@ -## Deploy your own Hub -The Planetary Computer Hub is a [JupyterHub] deployment configured with [Dask Gateway][gateway] for scalable computing. -Deploying your own Hub is a good option for a team of users looking to work with data from the Planetary Computer. 
You might choose this instead of the [Planetary Computer Hub][hub] if you need to specialize the environment, require additional computation resources, or want to tie a Hub into a broader Azure deployment while still using data from the Planetary Computer. +Deploying your own [JupyterHub] is a good option for a team of users looking to +work with data from the Planetary Computer who need a specialized environment, +require additional computation resources, or want to tie a compute environment into a broader +Azure deployment while still using data from the Planetary Computer. In this guide you will: * Deploy an [AKS] cluster using the Azure CLI * Deploy JupyterHub and Dask Gateway using the [daskhub] Helm chart. -We describe two deployment scenarios, a [simple](docs/concepts/hub-deployment/#simple-deployment) and a [recommended](docs/concepts/hub-deployment/#recommended-deployment) deployment. If you're new to Azure, Kubernetes, or JupyterHub, then you should try the simple deployment to verify that the basics work, before moving on to the more advanced recommended deployment. Finally, the configuration for the Planetary Computer's Hub is available on [GitHub](https://github.com/microsoft/planetary-computer-hub), which provides a reference for a real-world deployment. +We describe two deployment scenarios, a [simple](docs/concepts/hub-deployment/#simple-deployment) and a [recommended](docs/concepts/hub-deployment/#recommended-deployment) deployment. If you're new to Azure, Kubernetes, or JupyterHub, then you should try the simple deployment to verify that the basics work, before moving on to the more advanced recommended deployment. Finally, the configuration for the Planetary Computer based JupyterHub is available on [GitHub](https://github.com/microsoft/planetary-computer-hub), which provides a reference for a real-world deployment. 
For background, we recommend reading the [Zero to JupyterHub with Kubernetes][z2jh] guide and the [Dask Gateway on Kubernetes][gateway-k8s] documentation. @@ -393,4 +395,3 @@ Your AKS cluster and JupyterHub deployments can be customized in various ways. V [JupyterHub]: https://jupyterhub.readthedocs.io/en/stable/ [prerequisites]: https://docs.microsoft.com/en-us/azure/aks/kubernetes-walkthrough#prerequisites [z2jh]: https://zero-to-jupyterhub.readthedocs.io/en/latest/index.html -[hub]: ../overview/environment diff --git a/docs/concepts/sas.md b/docs/concepts/sas.md index 81597022..f115cff2 100644 --- a/docs/concepts/sas.md +++ b/docs/concepts/sas.md @@ -47,28 +47,7 @@ The `sign` endpoint makes it easy to convert an unsigned blob URL to a signed UR The `href` field here contains the full, signed URL which may be used directly. -### When an account is needed - -The STAC metatdata API is available to all users and does not require an account or a token to use. While all data assets require a token for accessing files on Azure Blob storage, some datasets also require an account key to be used when generating the token. This requirement is inidcated on the Data Catalog page of affected datasets. For these datasets, be sure to include your subscription key when requesting a token, as described below. - -### Supplying a subscription key - -When your Planetary Computer [account request](http://planetarycomputer.microsoft.com/account/request) was approved, a pair of subscription keys were automatically generated -for you. You can view your keys by singing in to the [developer portal](https://planetarycomputer.developer.azure-api.net/). 
- -You can supply your subscription key in an HTTP request in two ways: - -* Supply it in an `Ocp-Apim-Subscription-Key` on request header, for example: - -```bash -curl -H "Ocp-Apim-Subscription-Key: 123456789" https://planetarycomputer.microsoft.com/api/sas/v1/token/naip -``` - -* Supply it in a `subscription-key` query parameter, for example: - -```bash -curl https://planetarycomputer.microsoft.com/api/sas/v1/token/naip?subscription-key=123456789 -``` +The STAC metadata API is available to all users and does not require an account or a token to use. All data assets require a token for accessing files on Azure Blob storage. ### Rate limits and access restrictions @@ -78,12 +57,9 @@ Rate limiting and token expiry are dependent on two aspects of each requests: * Whether or not the request is originating from within the same data center as the Planetary Computer service (West Europe) * Whether or not a valid API subscription key has been supplied on the request -These two variables are used to determine the tier of rate limiting which is applied to requests, as well as the valid length of time for issued SAS tokens. For the most un-throttled access, we recommend utilizing a Planetary Computer subscription key and doing your work in the West Europe Azure region. - -Most datasets in the Planetary Computer are anonymously accessible: you don't need to supply a subscription key to get a SAS token for downloading the data. -Some datasets do require a subscription key, and some datasets are only available to certain approved users even if a subscription key is provided. This will be noted in -the dataset detail page in the [data catalog](https://planetarycomputer.microsoft.com/catalog). +These two variables are used to determine the tier of rate limiting which is applied to requests, as well as the valid length of time for issued SAS tokens. For the most un-throttled access, we recommend doing your work in the West Europe Azure region. 
+Datasets in the Planetary Computer are anonymously accessible: you don't need to supply a subscription key to get a SAS token for downloading the data. ### `planetary-computer` Python package @@ -130,4 +106,3 @@ planetarycomputer configure ``` Or you can set the environment variable `PC_SDK_SUBSCRIPTION_KEY` to your API subscription key. -Your subscription key is set automatically for you on the [Planetary Computer Hub](../overview/environment). diff --git a/docs/conf.py b/docs/conf.py index 61268a58..8b89752a 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -49,18 +49,6 @@ .. raw:: html diff --git a/docs/index.md b/docs/index.md index 82148904..65c87cbe 100644 --- a/docs/index.md +++ b/docs/index.md @@ -13,7 +13,6 @@ About Explorer -The Hub Use VS Code Use GitHub Codespaces Using QGIS diff --git a/docs/overview/about.md b/docs/overview/about.md index 80c7d39e..e341aa62 100644 --- a/docs/overview/about.md +++ b/docs/overview/about.md @@ -6,30 +6,25 @@ The Planetary Computer consists of four major components: - The [Data Catalog](https://planetarycomputer.microsoft.com/catalog), which includes petabytes of data about Earth systems, hosted on Azure and made available to users for free. - [APIs](../concepts/stac.md) that allow users to search for the data they need across space and time. -- The [Hub](./environment.md), a fully managed computing environment that allows scientists to process massive geospatial datasets. - [Applications](https://planetarycomputer.microsoft.com/applications), built by our network of partners, that put the Planetary Computer platform to work for environmental sustainability. ## Built on Open -The Planetary Computer uses open source tools and supports open standards. In fact, the foundation of the Planetary Computer is the incredible ecosystem of tools being developed in the open by our partners and the much broader open source community. 
For example, our Hub builds on the work done by the [Pangeo](http://pangeo.io/) community to put the tools of data science to work for the Earth science community, and our API builds on the work done by the [STAC](https://stacspec.org/) community to streamline and standardize the cataloging and discovery of geospatial data. +The Planetary Computer uses open source tools and supports open standards. In fact, the foundation of the Planetary Computer is the incredible ecosystem of tools being developed in the open by our partners and the much broader open source community. For example, our API builds on the work done by the [STAC](https://stacspec.org/) community to streamline and standardize the cataloging and discovery of geospatial data. Many of the Planetary Computer components are also open-source. These provide guidance on how to tie together open-source libraries on Azure for geospatial and environmental data analysis. | GitHub repository | Purpose | |-------------------------------------------------------------------------------------------------------|----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| | [Microsoft/planetary-computer-apis](https://github.com/Microsoft/planetary-computer-apis) | Deploys the [STAC](https://planetarycomputer.microsoft.com/docs/reference/stac/) and [data](https://planetarycomputer.microsoft.com/docs/reference/data/) APIs | -| [Microsoft/planetary-computer-hub](https://github.com/Microsoft/planetary-computer-hub) | Deploys the [Planetary Computer Hub](https://planetarycomputer.microsoft.com/docs/overview/environment/) | -| [Microsoft/planetary-computer-containers](https://github.com/Microsoft/planetary-computer-containers) | Builds the container images with the [software environment](https://planetarycomputer.microsoft.com/docs/overview/environment/#understanding-the-software-environment) used on 
the Hub | | [Microsoft/PlanetaryComputerExamples](https://github.com/microsoft/planetarycomputerexamples) | Contains notebooks with examples for each dataset, quickstarts, and tutorials for using the Planetary Computer | ## About the Preview -While the Planetary Computer data and APIs are publicly accessible, certain features of the Planetary Computer are in Preview and require access be granted. Use the [request access form](https://planetarycomputer.microsoft.com/account/request) to express your interest in becoming an early user. - -In the meantime, the core components of the Planetary Computer are usable without an account: +The Planetary Computer data and APIs are publicly accessible and can be used without an account, including: - The [STAC API](../reference/stac) is public and can be accessed anonymously. -- Most data can be downloaded anonymously, but will be throttled. See [Reading data from the STAC API](../quickstarts/reading-stac.ipynb) for an introduction and [Using Tokens for Data Access](../concepts/sas) for more background on accessing data. Some datasets require a Planetary Computer API subscription key. +- Most data can be downloaded anonymously, but will be throttled. See [Reading data from the STAC API](../quickstarts/reading-stac.ipynb) for an introduction and [Using Tokens for Data Access](../concepts/sas) for more background on accessing data. We're just getting started. Check back for updated documentation and new features! @@ -45,7 +40,6 @@ We're just getting started. 
Check back for updated documentation and new feature - [Sentinel-2 L2A](https://planetarycomputer.microsoft.com/dataset/sentinel-2-l2a#Example-Notebook) - [NAIP](https://planetarycomputer.microsoft.com/dataset/naip#Example-Notebook) - [ASTER L1T](https://planetarycomputer.microsoft.com/dataset/aster-l1t#Example-Notebook) -- [Request a Planetary Computer account](https://planetarycomputer.microsoft.com/account/request) ## Beyond the Planetary Computer @@ -74,4 +68,4 @@ If the Planetary Computer is useful for your work, please cite it using [this re doi = {10.5281/zenodo.7261897}, url = {https://doi.org/10.5281/zenodo.7261897} } -``` \ No newline at end of file +``` diff --git a/docs/overview/environment.md b/docs/overview/environment.md deleted file mode 100644 index 795dcc51..00000000 --- a/docs/overview/environment.md +++ /dev/null @@ -1,136 +0,0 @@ -# Planetary Computer Hub - -The Planetary Computer Hub is a convenient option for computing on the [data](https://planetarycomputer.microsoft.com/catalog) provided by the Planetary Computer. The Hub is a [JupyterHub](https://jupyterhub.readthedocs.io/en/stable/) deployment that includes a set of commonly used packages for geospatial and sustainability data analysis. It's enabled with [Dask](https://dask.org/) for scalable computing. - -## Logging in - -Once you have an [account](https://planetarycomputer.microsoft.com/account/request) on the Planetary Computer, you can log into the Hub. Visit [http://planetarycomputer.microsoft.com/compute](http://planetarycomputer.microsoft.com/compute) to access the Hub. - -![JupyterHub login page](images/hub-login.png) - -The Hub offers several *environments* to customize your computing environment. This determines the computing power (CPU cores, memory, GPUs) and software environment of your server. - -![JupyterHub environments: Python, R, GPU - Pytorch, GPU - Tensorflow, and QGIS](images/hub-profiles.png) - -Select an environment and click *Start* to start your server. 
You'll see a progress bar and some logs as your server comes online. Once it's ready you'll be redirected to Jupyterlab. - -## Stopping your server - -When you're done with your session on the hub, make sure to stop your server to release the resources your consuming. From the Jupyterlab UI, select *File > Hub Control Panel*. This will open a new tab at [https://pccompute.westeurope.cloudapp.azure.com/compute/hub/home](https://pccompute.westeurope.cloudapp.azure.com/compute/hub/home), which lets you manage your servers. - -Note that the QGIS environment doesn't run the Jupyterlab UI, so you'll need to navigate directly to the [JupyterHub Control Panel](https://pccompute.westeurope.cloudapp.azure.com/compute/hub/home) to stop your server. - -![Jupyterlab menu showing how to open the hub control panel.](images/jupyterlab-menu.png) - -Select *Stop My Server* to stop your server and release all of the resources you're consuming. - -![JupyterHub menu to stop the server](images/hub-home.png) - -Note that we will automatically stop notebook servers that appear idle or are older that 24 hours. -The Planetary Computer Hub is primarily intended for interactive computation on datasets -from our catalog. - -## Using JupyterLab - -The [JupyterLab User Guide](https://jupyterlab.readthedocs.io/en/stable/user/interface.html) describes the JupyterLab User Interface in detail. The Hub environments include a few common extensions. - -* Dask Labextension: Dask diagnostics from within jupyterlab. See [Scale with Dask](quickstarts/scale-with-dask/) for more (Python only). -* GeoJSON rending with [jupyterlab-geojson](https://github.com/jupyterlab/jupyter-renderers/tree/master/packages/geojson-extension). - -## Understanding the file-system - -Your server has a **home directory** at `/home/jovyan/` where you can store local files. This directory persists across sessions; if you create a file in `/home/jovyan/`, stop your server, and start it up again that file will still be there. 
- -Everything *outside* of `/home/jovyan` is *not* persisted across sessions. For example, if you make a modification to the Python environment at `/srv/conda/envs/notebook` by manually `pip install`ing a package, that new package will not be available after stopping and starting your server. - -You should only store code, notebooks, and analysis documents in your home directory. -Notably, you shouldn't store large amounts of data in your home directory. Instead, use something like [Azure Blob Storage](https://docs.microsoft.com/en-us/azure/storage/blobs/storage-blobs-introduction) instead (make sure to use the West Europe region, which is where your server is running). If you must use a local filesystem for your workflow, then consider writing to `/tmp`. This will be cleared between sessions, but will give you more room to work with than your home directory. - -## Environment Variables - -We set several environment variables when your server is started. - -| Variable | Value | Description | -| ---------------------------------- | ------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- | -| PC_SDK_SUBSCRIPTION_KEY | unique to you | Used to [sign assets](../concepts/sas) to download data. | -| GDAL_DISABLE_READDIR_ON_OPEN | EMPTY_DIR | [Improves GDAL performance](https://trac.osgeo.org/gdal/wiki/ConfigOptions#GDAL_DISABLE_READDIR_ON_OPEN) when opening single COGs with GDAL. | -| GDAL_HTTP_MERGE_CONSECUTIVE_RANGES | YES | [Improves GDAL performance](https://trac.osgeo.org/gdal/wiki/ConfigOptions#GDAL_HTTP_MERGE_CONSECUTIVE_RANGES) by merging consecutive HTTP requests. | -| GDAL_HTTP_MAX_RETRY | 5 | Number of retries on HTTP errors 429, 502, 503, or 504. | -| GDAL_HTTP_RETRY_DELAY | 3 | Number of seconds between retries. 
| - -## Understanding the software environment - -Your software environment is determined by the environment you selected when starting your server. It's a [conda environment](https://docs.conda.io/projects/conda/en/latest/user-guide/tasks/manage-environments.html) located at ``/srv/conda/envs/notebook``. The environment contains many software libraries that are helpful for geospatial data analysis The environments are defined and packaged into Docker containers in the [planetary-computer-containers][containers] repository. - -The exact versions available can be viewed in the environment's `conda-linux-64.lock` file. For example, see the [Python](https://github.com/microsoft/planetary-computer-containers/blob/main/python/conda-linux-64.lock) or [R](https://github.com/microsoft/planetary-computer-containers/blob/main/r/conda-linux-64.lock) lock files. If you have a notebook server running, you can [open a new terminal](https://jupyterlab.readthedocs.io/en/stable/user/terminal.html) and run `conda list`. - -## Accessing other processes and services - -Some processes might start HTTP servers on your JuptyerHub notebook server, which you'd like to securely access through JupyterHub. For example, [Shiny](https://shiny.rstudio.com/) dashboards or [tensorboard](https://www.tensorflow.org/tensorboard/). - -The page [accessing ports or hosts](https://jupyter-server-proxy.readthedocs.io/en/latest/arbitrary-ports-hosts.html) documents how to proxy these services using [Jupyter Server Proxy](https://jupyter-server-proxy.readthedocs.io/en/latest/). For example, we can proxy the Tensorboard UI by [opening a terminal](https://jupyterlab.readthedocs.io/en/stable/user/terminal.html) on our notebook server and starting the tensorboard server. -``` -$ tensorboard --logdir=mylogdir -``` - -Then we can access the Tensorboard UI at the URL ending with `/proxy//` (note that the trailing `/` is important). Tensorboard uses `6006` by default. 
So the full URL would be `https://pccompute.westeurope.cloudapp.azure.com/compute/user//proxy/6006/`. - -### Installing additional packages at runtime - -You can install additional packages "at runtime", after starting your notebook server. We recommend using `conda` (specifically, the `mamba` solver) to install additional packages, which will ensure that your environment stays in a consistent state. Most of the packages already present come from the [conda-forge](https://conda-forge.org/) channel, so you should prefer it with `-c conda-forge`. - -```console -$ mamba install -c conda-forge r-rjson -``` - -`conda` is a multi-language package manager, so most R packages are prefixed with `r-`. You can search the list of packages available at . If a package isn't present, then consider [contributing it to conda-forge](https://conda-forge.org/#contribute). - -Alternatively, you can try installing packages from [PyPI](https://pypi.org/) or [CRAN](https://cran.r-project.org/), but note that existing packages may not be recognized. - -As mentioned [above](#Understanding-the-file-system), changes to the environment outside of your home directory are not persisted across sessions. If you ``pip`` or ``conda`` install a package, it will not be present the next time you start your server. If you think our environments are missing a package that's commonly used in geospatial or sustainability data analysis, then [open an issue](https://github.com/microsoft/PlanetaryComputer/issues) requesting that we add it. - -### Installing packages on Dask clusters at runtime - -If you're using Dask for scalable computing, you should ensure that modifications you make to your local software environment are present on the workers too. 
Dask provides a [PipInstallPlugin](https://distributed.dask.org/en/latest/plugins.html#distributed.diagnostics.plugin.PipInstall) to automatically install packages when workers start - -```python ->>> from dask.distributed import PipInstall ->>> plugin = PipInstall(packages=["scikit-learn"], pip_options=["--upgrade"]) ->>> client.register_worker_plugin(plugin) -``` - -Note that this will slow down worker startup, since the packages will need to be found and downloaded before the worker can start executing tasks. - -## Cluster Limits - -There are a few restrictions on the size of the Dask Clusters you can create. - -1. The maximum number of **cores per worker** is 8, and the maximum amount of **memory per worker** is 64 GiB. This ensures that the workers fit in the [Standard_E8_v3 Virtual Machines][vms] used for workers. -2. The maximum number of **cores per cluster** is 400 -3. The maximum amount of **memory per cluster** is 3200 GiB -4. The maximum number of **workers per cluster** is 400 - -With the default settings of 1 core and 8 GiB per worker, this means a limit of 400 workers on 50 physical nodes (each with 8 cores and 64 GiB of memory). If this limit is too low for your use-case, [send us an email][email]. - -If you attempt to scale beyond the maximum cores or memory per worker, an exception is raised since your requested workers are larger than they Virtual Machines can handle. - -```python ->>> gateway = dask_gateway.Gateway() ->>> options = gateway.cluster_options() ->>> options["worker_cores"] = 16 -Traceback (most recent call last): -... -ValueError: worker_cores must be <= 8.000000, got 16.0 -``` - -If you attempt to scale beyond the maximum number of cores, memory, or workers per cluster, you'll see a warning and the cluster will be scaled to the limit. - -```python ->>> cluster = gateway.new_cluster() ->>> cluster.scale(1_000) -GatewayWarning: Scale request of 1000 workers would exceed resource limit of 400 workers. Scaling to 400 instead. 
-``` - -[vms]: https://docs.microsoft.com/en-us/azure/virtual-machines/ev3-esv3-series -[email]: mailto:planetarycomputer@microsoft.com -[containers]: https://github.com/microsoft/planetary-computer-containers diff --git a/docs/overview/explorer.md b/docs/overview/explorer.md index 444d6e32..28ea3e05 100644 --- a/docs/overview/explorer.md +++ b/docs/overview/explorer.md @@ -12,8 +12,7 @@ the datasets on the Planetary Computer are updated repeatedly over time, with data items overlapping geographically. Finding the right subset can be a time consuming process using programmatic tools, with a slow search-render-iterate feedback loop. The Explorer was designed to allow users to quickly find and -visualize data, and easily recreate their searches in an analytic environment -like our [Hub][pc-docs-hub]. +visualize data, and easily recreate their searches in an analytic environment. Additionally, it's a great tool for casual browsing to get more familiar with datasets, their spatiotemporal range, and to understand what types of @@ -185,47 +184,29 @@ water extent of Lake Powell, UT. :class: no-scaled-link ``` -### Working with results in the Hub +### Working with results with Python Finding and visualizing data items is likely only the first step in your data analysis workflow. The Explorer provides two options to move your search results -into a compute environment like the [Planetary Computer Hub][pc-docs-hub]. By -clicking "Explore results in the Hub" under the search results, you can generate -a Python snippet to recreate your exact search covering the map -viewport. This can be copied and launched into a new Hub notebook: - -```{image} images/explorer-hub.png -:height: 500 -:name: Planetary Computer Explorer hub code -:class: no-scaled-link -``` +into a compute environment. By clicking "Code snippet for search results" under +the search results, you can generate a Python snippet to recreate your exact +search covering the map viewport. 
Or, if you're interested in working with a single item you've found, you can generate a Python snippet by clicking the "code" (`{ }`) button which will load -that single item in the Hub: +that single item. -```{image} images/explorer-item-hub.png -:height: 500 -:name: Planetary Computer Explorer hub item code -:class: no-scaled-link -``` - -Since our data and APIs are accessible to anyone with or without a Hub account, -these snippets can be run in other Python compute environments. Please refer to +Since our data and APIs are accessible to anyone without an account, +these snippets can be run in any compute environment. Please refer to our [data access documentation][pc-docs-sas] for more details. ## Coming soon -We're constantly adding improvements to the Planetary Computer and the Explorer. Upcoming features include: - -- Visualization support for Zarr-backed datasets -- Hub integration to build queries and visualize search results within a notebook -- Layer comparison features +We're constantly adding improvements to the Planetary Computer and the Explorer. Watch for future announcements, and [visit our GitHub Discussions](https://github.com/microsoft/PlanetaryComputer/discussions) for feedback or questions. [1]: https://planetarycomputer.microsoft.com/explore -[pc-docs-hub]: https://planetarycomputer.microsoft.com/docs/overview/environment [pc-docs-api]: https://planetarycomputer.microsoft.com/docs/quickstarts/reading-stac/ [pc-docs-sas]: https://planetarycomputer.microsoft.com/docs/concepts/sas/ [pc-catalog]: https://planetarycomputer.microsoft.com/catalog diff --git a/docs/overview/qgis-plugin.md b/docs/overview/qgis-plugin.md index 37eaa283..e8c15a12 100644 --- a/docs/overview/qgis-plugin.md +++ b/docs/overview/qgis-plugin.md @@ -5,39 +5,12 @@ mapping, analysis, and visualization of spatial data. 
While much of the Planetary Computer is designed to support [cloud-native][cnative] workflows, it's easy to use our data in a traditional desktop clients, like QGIS, as well. -## QGIS on the Hub - -Planetary Computer users can use a QGIS instance directly in the Hub. We offer -an experimental Linux desktop image that is preloaded with QGIS and is located -in the same data center that the Planetary Computer datasets and APIs are hosted -in. This proximity allows for the most efficient data access, but users can -also use our QGIS tools and APIs in their own local or cloud-hosted -QGIS environments. - -To launch a cloud QGIS session in your browser, go to the [Hub][hub], click the "QGIS -(preview)" environment, and launch the instance. - -```{image} images/qgis-image-launch.png -:height: 145 -:name: Planetary Computer qgis hub selection -:class: no-scaled-link -``` - -It may take a few minutes for the instance to be ready, and will launch the QGIS -application on startup. Like other Hub instances, the home directory (`/home/jovyan`) is persisted between sessions, so save important data within this directory. - -```{image} images/qgis-desktop.png -:height: 500 -:name: Planetary Computer qgis hub -:class: no-scaled-link -``` - ## QGIS STAC API Plugin Since the Planetary Computer metadata API is built on the [STAC](https://stacspec.org/) standard, the easiest way to search for and add data to QGIS is by using the [STAC API Browser plugin][plugin-url]. Microsoft supported development of this plugin, with our partners [Kartoza][kartoza], to -make accessing STAC APIs from QGIS easier. The Planetary Computer QGIS Hub image comes with this plugin pre-installed. +make accessing STAC APIs from QGIS easier. For a general overview on plugin usage, the [official user guide][user-guide]. @@ -80,8 +53,7 @@ for your current zoom layer. 
**Note that assets added added as COG layers are signed using the SAS token mechanism, which expires by default after an hour.** You'll need to re-add the layer, though we're working on improvements to that workflow in the next version -of the plugin. This option will perform well on the Hub instance of QGIS, but -may be slower if you're working with a local QGIS instance. +of the plugin. This option may be slower if you're working with a local QGIS instance. 3. _Download the asset_. The plugin also allows you to download the asset directly, so you can work with the file itself. Be sure to set your download directory on the Settings tab to a path located within your home directory, so the data is preserved between launches. @@ -94,7 +66,6 @@ located within your home directory, so the data is preserved between launches. [qgis-home]: https://qgis.org/en/site/about/index.html [cnative]: https://en.wikipedia.org/wiki/Cloud_native_computing -[hub]: https://pccompute.westeurope.cloudapp.azure.com/compute/hub/ [plugin-url]: https://stac-utils.github.io/qgis-stac-plugin/ [kartoza]: https://kartoza.com/ [user-guide]: https://stac-utils.github.io/qgis-stac-plugin/user-guide/ diff --git a/docs/overview/ui-codespaces.md b/docs/overview/ui-codespaces.md index 3fe5465f..544ecd32 100644 --- a/docs/overview/ui-codespaces.md +++ b/docs/overview/ui-codespaces.md @@ -26,38 +26,6 @@ Your Codespace uses a [dev container][container] to provide all the software and We publish our environments, which contain many packages useful for geospatial data analysis, at . These can be used in the Codespaces [configuration] for your project to ensure you have access to those packages. -## Use the Planetary Computer's Dask Gateway - -Codespaces gives you access to a single node that's physically close to the Planetary Computer's data. If you need multiple machines, you can the Planetary Computer's Dask Gateway to scale your analysis. 
- -```{note} Using the Planetary Computer's Dask Gateway requires a Planetary Computer account. If you don't have an account, you can [request access][request]. -``` - -First, you'll need a JupyterHub API token. You can generate one at . - -Next, set that JupyterHub API token as an encrypted secret by following [this guide](https://docs.github.com/en/codespaces/managing-your-codespaces/managing-encrypted-secrets-for-your-codespaces). You'll need to grant access to the repositories you want to access the secret. To ensure that it's detected automatically by dask-gateway, the environment variable must be named `JUPYTERHUB_API_TOKEN`. - -You'll need to restart your codespace after granting it access to the `JUPYTERHUB_API_TOKEN` secret, if it's already running. - -Finally, configure your `devcontainer.json` file to include the relevant environment variables. If you created your Codespace by forking [Planetary Computer Examples][examples], you'll already have these set. - -```json -{ - "containerEnv": { - "DASK_GATEWAY__AUTH__TYPE": "jupyterhub", - "DASK_GATEWAY__CLUSTER__OPTIONS__IMAGE": "pcccr.azurecr.io/public/planetary-computer/python:latest", - "DASK_GATEWAY__ADDRESS": "https://pccompute.westeurope.cloudapp.azure.com/compute/services/dask-gateway", - "DASK_GATEWAY__PROXY_ADDRESS": "gateway://pccompute-dask.westeurope.cloudapp.azure.com:80", - } -} -``` - -Now, you can create a [Dask] cluster and distribute your workload on multiple machines. - -![Creating a Dask Cluster from Codespaces.](images/codespaces-dask.png) - -Any computations using Dask will execute on your cluster. - [catalog]: https://planetarycomputer.microsoft.com/catalog [codespaces]: https://github.com/features/codespaces [configuration]: https://docs.github.com/en/codespaces/customizing-your-codespace/configuring-codespaces-for-your-project @@ -65,5 +33,3 @@ Any computations using Dask will execute on your cluster. 
[examples]: https://github.com/microsoft/planetarycomputerexamples [fork]: https://guides.github.com/activities/forking/ [region]: https://docs.github.com/en/codespaces/managing-your-codespaces/setting-your-default-region-for-codespaces -[request]: https://planetarycomputer.microsoft.com/account/request -[Dask]: https://dask.org/ \ No newline at end of file diff --git a/docs/overview/ui-vscode.md b/docs/overview/ui-vscode.md deleted file mode 100644 index 59c64f01..00000000 --- a/docs/overview/ui-vscode.md +++ /dev/null @@ -1,52 +0,0 @@ -# Use Visual Studio Code - -You can connect to the Planetary Computer Hub using [Visual Studio Code](https://code.visualstudio.com/). With this setup, you use a local instance of VS Code (installed on your laptop or desktop computer) to connect to a remote Jupyter kernel running in the Planetary Computer Hub. - -![Diagram showing a local VS Code instance connecting to a remote Jupyter kernel in the Planetary Computer Hub](images/vscode-diagram.png) - -You might choose this setup - rather than logging in to the Hub directly and working in JupyterLab - because you prefer VS Code as an editing environment and have customized your local environment, but you still want the benefits of computing in the Planetary Computer Hub, like having compute that's physically close to Planetary Computer data and not having to manage a Python environment. - -The *source files* you're working with will be stored on your local machine's hard drive, but they will be executed in Azure, next to the data. - -## Setting up your Hub environment - -Before you can work with the Planetary Computer Hub with VS Code, you need to start a server on the Hub and create your API token. - -1. **[Log into the Hub](https://pccompute.westeurope.cloudapp.azure.com/compute/hub/spawn)** to start a notebook server prior to connecting from VS Code. - -2. 
**[Create a JupyterHub API Token](http://planetarycomputer.microsoft.com/compute/hub/token)** so that JupyterHub knows who you are when you try to connect from VS Code. This token is private and should not be shared publicly! - -## Setting up your local environment - -2. **Install the Jupyter Extension for VS Code** from [the online VS extension marketplace](https://marketplace.visualstudio.com/items?itemName=ms-toolsai.jupyter) or from within VS Code using the [embedded extension marketplace](https://code.visualstudio.com/docs/editor/extension-marketplace). - -3. **Optionally clone the [Planetary Computer Examples](https://github.com/Microsoft/PlanetaryComputerExamples) repository** to your local environment. This step is not required, but this repo provides a number of useful starter examples. - -4. **Connect from VSCode**. - - Using the VS Code Command Palette, select "Jupyter: Specify local or remote Jupyter server for connections": - - ![Jupyter: Specify local or remote Jupyter server for connections](images/specify-jupyter.png) - - Then select "Existing" to specify the URI of an existing server: - - ![Jupyter: Existing server](images/existing.png) - - Finally, input your connection string, which should look like `https://pccompute.westeurope.cloudapp.azure.com/compute/user//?token=`: - - ![Jupyter: Server URI](images/vscode-jupyter-uri.png) - - The components in that URI are: - - - The Hub address: `https://pccompute.westeurope.cloudapp.azure.com/compute` - - `/user/` - - Your username: Probably your email address. Get this from the URL in your browser when you sign into the Hub. - - `/?token=` - - The token you just generated [on the Hub](http://planetarycomputer.microsoft.com/compute/hub/token). - -5. **Press "Enter" to connect to that kernel**. - - Then reload the Jupyter extension and you should be connected. - -VS Code will save this configuration. The next time you connnect you just need to start your server and select that existing connection string. 
- diff --git a/package-lock.json b/package-lock.json index 49b35a22..04682433 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "pc-datacatalog", - "version": "2024.1.2", + "version": "2024.1.3", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -5569,11 +5569,6 @@ "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz", "integrity": "sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ==" }, - "deepmerge": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-2.2.1.tgz", - "integrity": "sha512-R9hc1Xa/NOBi9WRVUWg19rl1UB7Tt4kuPd+thNJgFZoxXsTz7ncaPaeIm+40oSGuP33DfMb4sZt1QIGiJzC4EA==" - }, "default-gateway": { "version": "6.0.3", "resolved": "https://registry.npmjs.org/default-gateway/-/default-gateway-6.0.3.tgz", @@ -7015,27 +7010,6 @@ "web-streams-polyfill": "4.0.0-beta.3" } }, - "formik": { - "version": "2.2.9", - "resolved": "https://registry.npmjs.org/formik/-/formik-2.2.9.tgz", - "integrity": "sha512-LQLcISMmf1r5at4/gyJigGn0gOwFbeEAlji+N9InZF6LIMXnFNkO42sCI8Jt84YZggpD4cPWObAZaxpEFtSzNA==", - "requires": { - "deepmerge": "^2.1.1", - "hoist-non-react-statics": "^3.3.0", - "lodash": "^4.17.21", - "lodash-es": "^4.17.21", - "react-fast-compare": "^2.0.1", - "tiny-warning": "^1.0.2", - "tslib": "^1.10.0" - }, - "dependencies": { - "tslib": { - "version": "1.14.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.14.1.tgz", - "integrity": "sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg==" - } - } - }, "forwarded": { "version": "0.2.0", "resolved": "https://registry.npmjs.org/forwarded/-/forwarded-0.2.0.tgz", @@ -10302,11 +10276,6 @@ "big-integer": "^1.6.16" } }, - "nanoclone": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/nanoclone/-/nanoclone-0.2.1.tgz", - "integrity": 
"sha512-wynEP02LmIbLpcYw8uBKpcfF6dmg2vcpKqxeH5UcoKEYdExslsdUA4ugFauuaeYdTB76ez6gJW8XAZ6CgkXYxA==" - }, "nanoid": { "version": "3.3.4", "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.4.tgz", @@ -11784,11 +11753,6 @@ "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", "dev": true }, - "property-expr": { - "version": "2.0.5", - "resolved": "https://registry.npmjs.org/property-expr/-/property-expr-2.0.5.tgz", - "integrity": "sha512-IJUkICM5dP5znhCckHSv30Q4b5/JA5enCtkRHYaOVOAocnH/1BQEYTC5NMfT3AVl/iXKdr3aqQbQn9DxyWknwA==" - }, "property-information": { "version": "5.6.0", "resolved": "https://registry.npmjs.org/property-information/-/property-information-5.6.0.tgz", @@ -12100,11 +12064,6 @@ "resolved": "https://registry.npmjs.org/react-error-overlay/-/react-error-overlay-6.0.11.tgz", "integrity": "sha512-/6UZ2qgEyH2aqzYZgQPxEnz33NJ2gNsnHA2o5+o4wW9bLM/JYQitNP9xPhsXwC08hMMovfGe/8retsdDsczPRg==" }, - "react-fast-compare": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/react-fast-compare/-/react-fast-compare-2.0.4.tgz", - "integrity": "sha512-suNP+J1VU1MWFKcyt7RtjiSWUjvidmQSlqu+eHslq+342xCbGTYmC0mEhPCOHxlW0CywylOC1u2DFAT+bv4dBw==" - }, "react-helmet": { "version": "6.1.0", "resolved": "https://registry.npmjs.org/react-helmet/-/react-helmet-6.1.0.tgz", @@ -13783,11 +13742,6 @@ "resolved": "https://registry.npmjs.org/thunky/-/thunky-1.1.0.tgz", "integrity": "sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==" }, - "tiny-warning": { - "version": "1.0.3", - "resolved": "https://registry.npmjs.org/tiny-warning/-/tiny-warning-1.0.3.tgz", - "integrity": "sha512-lBN9zLN/oAf68o3zNXYrdCt1kP8WsiGW8Oo2ka41b2IM5JL/S1CTyX1rW0mb/zSuJun0ZUrDxx4sqvYS2FWzPA==" - }, "tmp": { "version": "0.2.1", "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", @@ -13825,11 +13779,6 @@ "resolved": "https://registry.npmjs.org/toidentifier/-/toidentifier-1.0.1.tgz", 
"integrity": "sha512-o5sSPKEkg/DIQNmH43V0/uerLrpzVedkUh8tGNvaeXpfpuwjKenlSox/2O/BTlZUtEe+JG7s5YhEz608PlAHRA==" }, - "toposort": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/toposort/-/toposort-2.0.2.tgz", - "integrity": "sha1-riF2gXXRVZ1IvvNUILL0li8JwzA=" - }, "tough-cookie": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/tough-cookie/-/tough-cookie-4.0.0.tgz", @@ -14963,20 +14912,6 @@ "resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz", "integrity": "sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q==" }, - "yup": { - "version": "0.32.11", - "resolved": "https://registry.npmjs.org/yup/-/yup-0.32.11.tgz", - "integrity": "sha512-Z2Fe1bn+eLstG8DRR6FTavGD+MeAwyfmouhHsIUgaADz8jvFKbO/fXc2trJKZg+5EBjh4gGm3iU/t3onKlXHIg==", - "requires": { - "@babel/runtime": "^7.15.4", - "@types/lodash": "^4.14.175", - "lodash": "^4.17.21", - "lodash-es": "^4.17.21", - "nanoclone": "^0.2.1", - "property-expr": "^2.0.4", - "toposort": "^2.0.2" - } - }, "zenscroll": { "version": "4.0.2", "resolved": "https://registry.npmjs.org/zenscroll/-/zenscroll-4.0.2.tgz", diff --git a/package.json b/package.json index c36d2a3f..fe840412 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "pc-datacatalog", - "version": "2024.1.3", + "version": "2024.2.0", "private": true, "proxy": "http://api:7071/", "dependencies": { @@ -35,7 +35,6 @@ "buffer": "^6.0.3", "dayjs": "^1.11.3", "dompurify": "^2.3.8", - "formik": "^2.2.9", "highlight.js": "^11.5.1", "https-browserify": "^1.0.0", "json-stringify-pretty-compact": "^4.0.0", @@ -59,8 +58,7 @@ "redux": "^4.2.0", "stream-http": "^3.2.0", "swagger-ui-react": "^4.16.1", - "typescript": "^4.6.2", - "yup": "^0.32.11" + "typescript": "^4.6.2" }, "scripts": { "start": "craco start", diff --git a/src/App.tsx b/src/App.tsx index 00fcf682..84ec2bac 100644 --- a/src/App.tsx +++ b/src/App.tsx @@ -5,7 +5,6 @@ import { initializeIcons } from 
"@uifabric/icons"; import { initializeFeatures } from "utils/featureFlags"; import { registerCustomIcons } from "utils/icons"; import { usePrefetchContent } from "utils/requests"; -import AccountSurvey from "pages/AccountSurvey"; import Applications from "pages/Applications"; import Collection from "pages/Collection"; import Catalog from "./pages/Catalog2"; @@ -48,7 +47,6 @@ function App() { } /> } /> } /> - } /> { - const { status } = useSession(); return ( { Documentation -
- - {!status.isLoggedIn && ( - - Request access - - )} - - - - - -
); }; const headerTokens = { childrenGap: "10px 21px" }; -const authSectionTokens = { childrenGap: 4 }; diff --git a/src/components/Header/Header.Overflow.tsx b/src/components/Header/Header.Overflow.tsx index db8b8f42..4c4012bb 100644 --- a/src/components/Header/Header.Overflow.tsx +++ b/src/components/Header/Header.Overflow.tsx @@ -77,9 +77,6 @@ export const HeaderOverflow: React.FC = () => { Documentation - - Request Access - diff --git a/src/components/MetadataHtmlContent.js b/src/components/MetadataHtmlContent.js index c1966384..568e3069 100644 --- a/src/components/MetadataHtmlContent.js +++ b/src/components/MetadataHtmlContent.js @@ -3,7 +3,6 @@ import { DefaultButton, MessageBar, MessageBarType, - PrimaryButton, Spinner, SpinnerSize, Stack, @@ -11,7 +10,7 @@ import { import { useStaticMetadata } from "../utils/requests"; import "../styles/codefiles.css"; -import { a11yPostProcessDom, buildGitHubUrl, buildHubLaunchUrl } from "../utils"; +import { a11yPostProcessDom, buildGitHubUrl } from "../utils"; import NewTabLink from "./controls/NewTabLink"; import GeneratedInternalToc from "../pages/Docs/components/GeneratedInternalToc"; @@ -29,16 +28,6 @@ const MetadataHtmlContent = ({ src, launch }) => { ) : null; - const launcher = launch ? ( - - Launch in Hub - - ) : null; - const loadingMsg = ( ); @@ -76,7 +65,6 @@ const MetadataHtmlContent = ({ src, launch }) => {
- {launcher} {ghLink}
diff --git a/src/components/forms/FormCheckbox.js b/src/components/forms/FormCheckbox.js deleted file mode 100644 index c4a42f28..00000000 --- a/src/components/forms/FormCheckbox.js +++ /dev/null @@ -1,21 +0,0 @@ -import { Checkbox } from "@fluentui/react"; -import { useFormikContext } from "formik"; - -const FormCheckbox = ({ name, label, placeholder = null, required = false }) => { - const { values, touched, errors, setFieldValue } = useFormikContext(); - - return ( - setFieldValue(name, checked)} - errorMessage={touched[name] && errors[name]} - onRenderLabel={typeof label === "object" ? () => label : undefined} - /> - ); -}; - -export default FormCheckbox; diff --git a/src/components/forms/FormInput.js b/src/components/forms/FormInput.js deleted file mode 100644 index 84236c30..00000000 --- a/src/components/forms/FormInput.js +++ /dev/null @@ -1,30 +0,0 @@ -import { TextField } from "@fluentui/react"; -import { useFormikContext } from "formik"; - -const FormInput = ({ - name, - label = "", - placeholder = "", - required = false, - multiline = false, -}) => { - const { values, touched, errors, handleChange } = useFormikContext(); - - return ( - - ); -}; - -export default FormInput; diff --git a/src/components/forms/FormSelect.js b/src/components/forms/FormSelect.js deleted file mode 100644 index f7d74280..00000000 --- a/src/components/forms/FormSelect.js +++ /dev/null @@ -1,37 +0,0 @@ -import * as React from "react"; -import { Dropdown } from "@fluentui/react"; -import { useFormikContext } from "formik"; - -const FormSelect = ({ label = "", name, options, multiSelect = false }) => { - const { values, setFieldValue } = useFormikContext(); - - return ( - { - if (multiSelect) { - const vals = values[name]; - if (option.selected) { - setFieldValue(name, [...vals, option.key]); - } else { - const idx = vals.indexOf(option.key); - - if (idx !== -1) { - setFieldValue(name, [...vals.slice(0, idx), ...vals.slice(idx + 1)]); - } - } - } else { - setFieldValue(name, 
option.key); - } - }} - /> - ); -}; - -export default FormSelect; diff --git a/src/components/stac/RequiresAccount.test.tsx b/src/components/stac/RequiresAccount.test.tsx deleted file mode 100644 index edac67a8..00000000 --- a/src/components/stac/RequiresAccount.test.tsx +++ /dev/null @@ -1,45 +0,0 @@ -// import { render } from "testUtils"; - -import { render } from "@testing-library/react"; -import { IStacCollection } from "types/stac"; -import { CollectionProvider } from "./CollectionContext"; -import RequiresAccount from "./RequiresAccount"; - -const getAcctReqCollection = (required: boolean): IStacCollection => { - return { - id: "test", - title: "Test", - description: "Test", - license: "", - item_assets: {}, - keywords: [], - assets: {}, - extent: { spatial: { bbox: [] }, temporal: { interval: [] } }, - links: [], - "msft:short_description": "", - "msft:requires_account": required, - "msft:region": "westeurope", - }; -}; - -test("it shows message when msft:requires_account is true", async () => { - const collection = getAcctReqCollection(true); - const { getByTestId } = render( - - - - ); - - expect(getByTestId("msft-acct-req-msg")).toBeInTheDocument(); -}); - -test("it doest not show message when msft:requires_account is false", () => { - const collection = getAcctReqCollection(false); - const { queryByTestId } = render( - - - - ); - - expect(queryByTestId("msft-acct-req-msg")).toBeNull(); -}); diff --git a/src/components/stac/RequiresAccount.tsx b/src/components/stac/RequiresAccount.tsx deleted file mode 100644 index a0af896f..00000000 --- a/src/components/stac/RequiresAccount.tsx +++ /dev/null @@ -1,43 +0,0 @@ -import { useStac } from "./CollectionContext"; -import { - IMessageBarStyles, - MessageBar, - MessageBarType, - Text, -} from "@fluentui/react"; -import { Link } from "react-router-dom"; -import { highContrastLinkColor } from "pages/StorageCollectionDetail/StorageCollectionDetail.index"; - -const RequiresAccount = () => { - const collection = 
useStac(); - - if (!collection?.["msft:requires_account"]) return null; - - return ( - - - A Planetary Computer account is required to access the assets in this - dataset.{" "} - - See documentation. - - - - ); -}; - -export default RequiresAccount; - -export const messageBarStyles: IMessageBarStyles = { - root: { - padding: "4px 2px", - borderRadius: 4, - }, -}; diff --git a/src/pages/AccountSurvey.tsx b/src/pages/AccountSurvey.tsx deleted file mode 100644 index d707fde4..00000000 --- a/src/pages/AccountSurvey.tsx +++ /dev/null @@ -1,229 +0,0 @@ -import { Formik, Form } from "formik"; -import { - Link, - PrimaryButton, - Separator, - Spinner, - SpinnerSize, - Stack, - Text, -} from "@fluentui/react"; -import { useMutation } from "react-query"; -import { useNavigate } from "react-router"; -import axios from "axios"; -import * as yup from "yup"; - -import SEO from "../components/Seo"; -import Layout from "../components/Layout"; -import FormInput from "../components/forms/FormInput"; -import FormSelect from "../components/forms/FormSelect"; -import FormCheckbox from "../components/forms/FormCheckbox"; -import DefaultBanner from "../components/DefaultBanner"; -import NewTabLink from "../components/controls/NewTabLink"; - -import options from "config/account.yml"; -import countries from "config/countries.yml"; - -import { marginVStyle } from "../styles"; -import { ScrollToTopOnMount } from "../components/ScrollToTopOnMount"; - -const stackTokens = { - spinnerStack: { - childrenGap: 20, - }, -}; - -const AccountSurvey = () => { - const navigate = useNavigate(); - const mutation = useMutation((survey: Record) => - axios.post("./api/survey", survey) - ); - - const handleSubmit = (survey: Record) => { - mutation.mutate(survey); - }; - - const validationSchema = yup.object({ - email: yup.string().email("Enter a valid email").required("Email is required"), - name: yup.string().required("Your name is required"), - affiliation: yup.string(), - industry: yup.string(), - 
languages: yup.array(), - country: yup.string(), - datasets: yup.string(), - areaOfStudy: yup.string(), - terms: yup.boolean().required(), - }); - - const formikProps = { - initialValues: { - email: "", - name: "", - affiliation: "", - industry: "", - languages: [], - country: "", - datasets: "", - studyArea: "", - terms: false, - }, - validationSchema: validationSchema, - onSubmit: handleSubmit, - }; - - const tosLabel = ( - - You have read and agree to our{" "} - terms of use. - - ); - - const form = ( - -
-

- The Planetary Computer Explorer, APIs and many datasets are available to - anyone who would like to use it. Check out the{" "} - Data Catalog to get started. -

-

- For specific datasets such{" "} - Sentinel 1 RTC or to - access the{" "} - - Planetary Computer Hub - {" "} - , you will need to create an account. -

-

- To request an account please tell us about the project or solution you are - working on and how you are using geospatial data. We strive to review new - requests frequently and will let you know once your request has been - approved. -

-

- Note: if you are trying to login with an enterprise email address, your - company may have locked down your usage of that email preventing you from - using it to log in. If that happens when trying to log in, please use a - non-enterprise or personal email address. -

- -

- Microsoft will use this information to communicate with you about the - Planetary Computer, to evaluate your eligibility to participate in our - preview, to prioritize new features, and to communicate non-identifying - information—both internally and externally—about the geographic regions and - focus areas that our users represent. For more information on how we use - your data please see{" "} - - Privacy & Cookies - - . -

- - - - - - - ({ key: c, text: c }))} - /> - - - - - - - {mutation.isLoading && } - - -
- ); - - const banner = ( - -

Request access

-

- The Planetary Computer API and Planetary Computer Hub are currently in - preview, and we're excited to expand our partner and developer network. -

-
- ); - - const successMsg = ( - <> -

Thank you for your interest!

-

- We are reviewing your request and will let you know once your account has - been approved. In the meantime, you can explore our data catalog by going to{" "} - Data Catalog page. Please contact{" "} - - planetarycomputer@microsoft.com - {" "} - if you have questions or if you would like to unsubscribe. -

- - - ); - - const failMsg = ( - <> -

Something went wrong...

- - Sorry, we seem to be having trouble with our signups at the moment. Please{" "} - navigate(0)}>try again or email{" "} - - planetarycomputer@microsoft.com - {" "} - for support. - - - - ); - - return ( - - -
- {mutation.isError && failMsg} - {(mutation.isLoading || mutation.isIdle) && form} - {mutation.isSuccess && successMsg} -
-
- ); -}; - -export default AccountSurvey; diff --git a/src/pages/Catalog2/Catalog.Banner.tsx b/src/pages/Catalog2/Catalog.Banner.tsx index 42a9b53b..e342018b 100644 --- a/src/pages/Catalog2/Catalog.Banner.tsx +++ b/src/pages/Catalog2/Catalog.Banner.tsx @@ -18,8 +18,7 @@ export const CatalogBanner: React.FC = ({ The Planetary Computer Data Catalog includes petabytes of environmental monitoring data, in consistent, analysis-ready formats. All of the datasets - below can be accessed via Azure Blob Storage, and can be used by developers - whether you're working within or outside of our Planetary Computer Hub. + below can be accessed via Azure Blob Storage.
diff --git a/src/pages/Collection.js b/src/pages/Collection.js index 2c0e7fa5..f7a54771 100644 --- a/src/pages/Collection.js +++ b/src/pages/Collection.js @@ -31,7 +31,6 @@ import Assets from "../components/stac/Assets"; import CollectionUrl from "components/stac/CollectionUrl"; import { ErrorBoundary } from "react-error-boundary"; import ErrorFallback from "components/ErrorFallback"; -import RequiresAccount from "components/stac/RequiresAccount"; const Collection = () => { const { id } = useParams(); @@ -127,7 +126,6 @@ const Collection = () => {

Overview

- diff --git a/src/pages/Explore/components/ExploreInHub/SnippetCopyPanel.tsx b/src/pages/Explore/components/ExploreInHub/SnippetCopyPanel.tsx index a7e4cd93..7ec43575 100644 --- a/src/pages/Explore/components/ExploreInHub/SnippetCopyPanel.tsx +++ b/src/pages/Explore/components/ExploreInHub/SnippetCopyPanel.tsx @@ -4,7 +4,6 @@ import { DirectionalHint, FontWeights, FontSizes, - PrimaryButton, Stack, mergeStyleSets, getTheme, @@ -15,7 +14,6 @@ import { import { useId } from "@fluentui/react-hooks"; import { useCopyToClipboard } from "react-use"; -import { HUB_URL } from "utils/constants"; import NewTabLink from "components/controls/NewTabLink"; import { useCqlFormat } from "pages/Explore/utils/hooks/useStacFilter"; import { createCqlPythonSnippet, createItemPythonSnippet } from "./pythonSnippet"; @@ -48,8 +46,8 @@ const SnippetCopyPanel = ({ snippetType === "query" ? ( <> - Use the code below to recreate this search in the Planetary Computer Hub or - other Python analytic environments. Read more about searching using the{" "} + Use the code below to recreate this search in any Python analytic + environments. Read more about searching using the{" "} STAC API @@ -104,13 +102,6 @@ const SnippetCopyPanel = ({ > Copy - - Open Hub - diff --git a/src/pages/Explore/components/ExploreInHub/index.tsx b/src/pages/Explore/components/ExploreInHub/index.tsx index 02df28dc..d93910d3 100644 --- a/src/pages/Explore/components/ExploreInHub/index.tsx +++ b/src/pages/Explore/components/ExploreInHub/index.tsx @@ -9,7 +9,7 @@ const ExploreInHub = () => { return ( - Explore results in the Hub + Code snippet for search results { The Planetary Computer API makes it easy for users to find exactly the data they need, simplifying search and discovery across our Data Catalog. - - The Planetary Computer Hub is a development environment that makes our - data and APIs accessible through familiar, open-source tools, and allows - users to easily scale their analyses. 
- { color: "#fff", }} > - The Planetary Computer API and Hub are currently available in preview. - If you're interested in developing on our platform,{" "} - - request access now - - . + The Planetary Computer API is currently available in preview.

Learn more about Microsoft's{" "} { const { storageCollectionConfig } = useDataConfig(); @@ -91,3 +91,10 @@ const tokens: IStackTokens = { }; export const highContrastLinkColor = { color: "#006cbe" }; + +const messageBarStyles: IMessageBarStyles = { + root: { + padding: "4px 2px", + borderRadius: 4, + }, +}; diff --git a/src/utils/constants.js b/src/utils/constants.js index 37be4cb3..4b04a61b 100644 --- a/src/utils/constants.js +++ b/src/utils/constants.js @@ -18,7 +18,6 @@ export const DATA_URL = apiRoot.endsWith("stac") : `${tilerRoot}/api/data/v1`; export const IMAGE_URL = process.env.REACT_APP_IMAGE_API_ROOT || ""; -export const HUB_URL = process.env.REACT_APP_HUB_URL || ""; export const AUTH_URL = process.env.REACT_APP_AUTH_URL || apiRoot; export const AZMAPS_CLIENT_ID = process.env.REACT_APP_AZMAPS_CLIENT_ID; diff --git a/src/utils/index.ts b/src/utils/index.ts index 72ee4f1c..7e53fe50 100644 --- a/src/utils/index.ts +++ b/src/utils/index.ts @@ -2,7 +2,7 @@ import dayjs, { Dayjs } from "dayjs"; import utc from "dayjs/plugin/utc"; import { IStacCollection, IStacItem } from "types/stac"; -import { DATA_URL, HUB_URL, QS_REQUEST_ENTITY, REQUEST_ENTITY } from "./constants"; +import { DATA_URL, QS_REQUEST_ENTITY, REQUEST_ENTITY } from "./constants"; import * as qs from "query-string"; import { IMosaic, IMosaicRenderOption } from "pages/Explore/types"; import { DEFAULT_MIN_ZOOM } from "pages/Explore/utils/constants"; @@ -103,30 +103,6 @@ const configFromLauncher = (launcher: ILauncherConfig | string): ILauncherConfig return config; }; -export function buildHubLaunchUrl(filePath: string): string; -export function buildHubLaunchUrl(launchConfig: ILauncherConfig): string; -export function buildHubLaunchUrl(launcher: ILauncherConfig | string): string { - const { repo, branch, filePath } = configFromLauncher(launcher); - const urlRepo = encodeURIComponent(repo); - const urlBranch = encodeURIComponent(branch); - const repoName = repo.split("/").pop(); - - // Get a unique 
but arbitrary string for the workspace path. This works - // around in issue where nbgitpuller workspace may conflict with JupyterHub. - // The workspace can't contain / so substitute a - for any. - const fileWorkspace = filePath - .substring(filePath.indexOf("/") + 1, filePath.lastIndexOf(".")) - .replace(/\//g, "-"); - - const pathPrefix = filePath.endsWith(".ipynb") - ? `lab/workspaces/${fileWorkspace}/tree` - : "rstudio"; - - const urlPath = encodeURIComponent(`${pathPrefix}/${repoName}/${filePath}`); - - return `${HUB_URL}/user-redirect/git-pull?repo=${urlRepo}&urlpath=${urlPath}&branch=${urlBranch}`; -} - export function buildGitHubUrl(launcher: ILauncherConfig): string; export function buildGitHubUrl(launcher: string): string; export function buildGitHubUrl(launcher: ILauncherConfig | string): string {