diff --git a/website/pages/ar/sps/_meta.js b/website/pages/ar/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/ar/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/ar/sps/introduction.mdx b/website/pages/ar/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/ar/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/ar/sps/triggers-example.mdx b/website/pages/ar/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/ar/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/ar/sps/triggers.mdx b/website/pages/ar/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/ar/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/cs/sps/_meta.js b/website/pages/cs/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/cs/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/cs/sps/introduction.mdx b/website/pages/cs/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/cs/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/cs/sps/triggers-example.mdx b/website/pages/cs/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/cs/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/cs/sps/triggers.mdx b/website/pages/cs/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/cs/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/de/sps/_meta.js b/website/pages/de/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/de/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/de/sps/introduction.mdx b/website/pages/de/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/de/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/de/sps/triggers-example.mdx b/website/pages/de/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/de/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/de/sps/triggers.mdx b/website/pages/de/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/de/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/en/_meta.js b/website/pages/en/_meta.js index 38ea74ac7720..1933137f8d5c 100644 --- a/website/pages/en/_meta.js +++ b/website/pages/en/_meta.js @@ -33,6 +33,7 @@ export default { title: 'Substreams', }, substreams: '', + sps: 'Substreams-powered Subgraphs', '---4': { type: 'separator', }, diff --git a/website/pages/en/new-chain-integration.mdx b/website/pages/en/new-chain-integration.mdx index bc4f247011c3..a8a3c88c7250 100644 --- a/website/pages/en/new-chain-integration.mdx +++ b/website/pages/en/new-chain-integration.mdx @@ -76,33 +76,4 @@ Graph Node should be syncing the deployed subgraph if there are no errors. Give ## Substreams-powered Subgraphs -For StreamingFast-led Firehose/Substreams integrations, basic support for foundational Substreams modules (e.g. decoded transactions, logs and smart-contract events) and Substreams-powered subgraph codegen tools are included (check out [Injective](https://substreams.streamingfast.io/documentation/intro-getting-started/intro-injective/injective-first-sps) for an example). - -There are two options to consume Substreams data through a subgraph: - -- **Using Substreams triggers:** Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. -- **Using EntityChanges:** By writing more of the logic into Substreams, you can consume the module's output directly into `graph-node`. In `graph-node`, you can use the Substreams data to create your subgraph entities. - -It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in `graph-node`. 
Consider the following example implementing a subgraph handler: - -```ts -export function handleTransactions(bytes: Uint8Array): void { - let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).trasanctions // 1. - if (transactions.length == 0) { - log.info('No transactions found', []) - return - } - - for (let i = 0; i < transactions.length; i++) { - // 2. - let transaction = transactions[i] - - let entity = new Transaction(transaction.hash) // 3. - entity.from = transaction.from - entity.to = transaction.to - entity.save() - } -} -``` - -The `handleTransactions` function is a subgraph handler that receives the raw Substreams bytes as parameter and decodes them into a `Transactions` object. Then, for every transaction, a new subgraph entity is created. For more information about Substreams triggers, visit the [StreamingFast documentation](https://substreams.streamingfast.io/documentation/consume/subgraph/triggers) or check out community modules at [substreams.dev](https://substreams.dev/). +For StreamingFast-led Firehose/Substreams integrations, basic support for foundational Substreams modules (e.g. decoded transactions, logs and smart-contract events) and Substreams codegen tools are included. These tools enable the ability to enable [Substreams-powered subgraphs](/sps/introduction). Follow the [How-To Guide](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application) and run `substreams codegen subgraph` to experience the codegen tools for yourself. 
diff --git a/website/pages/en/sps/_meta.js b/website/pages/en/sps/_meta.js new file mode 100644 index 000000000000..a8b84287610e --- /dev/null +++ b/website/pages/en/sps/_meta.js @@ -0,0 +1,5 @@ +export default { + introduction: 'Introduction', + triggers: '', + 'triggers-example': 'Tutorial', +} diff --git a/website/pages/en/sps/introduction.mdx b/website/pages/en/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/en/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/en/sps/triggers-example.mdx b/website/pages/en/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/en/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source:
The example below demonstrates how to extract the non-derived transfers associated with the Orca account ID into subgraph entities:
let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1.
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/en/substreams.mdx b/website/pages/en/substreams.mdx index 710e110012cc..a838a6924e2f 100644 --- a/website/pages/en/substreams.mdx +++ b/website/pages/en/substreams.mdx @@ -4,9 +4,11 @@ title: Substreams ![Substreams Logo](/img/substreams-logo.png) -Substreams is a powerful blockchain indexing technology developed for The Graph Network. It enables developers to write Rust modules, compose data streams alongside the community, and provide extremely high-performance indexing due to parallelization in a streaming-first approach. +Substreams is a powerful blockchain indexing technology designed to enhance performance and scalability within The Graph Network. It offers the following features: -With Substreams, developers can quickly extract data from different blockchains (Ethereum, BNB, Solana, ect.) and send it to various locations of their choice, such as a Postgres database, a Mongo database, or a Subgraph. Additionally, Substreams packages enable developers to specify which data they want to extract from the blockchain. +- **Accelerated Indexing**: Substreams reduce subgraph indexing time thanks to a parallelized engine, enabling faster data retrieval and processing. +- **Multi-Chain Support**: Substreams expand indexing capabilities beyond EVM-based chains, supporting ecosystems like Solana, Injective, Starknet, and Vara. +- **Multi-Sink Support:** Subgraph, Postgres database, Clickhouse, Mongo database ## How Substreams Works in 4 Steps @@ -44,3 +46,7 @@ To learn about the latest version of Substreams CLI, which enables developers to ### Expand Your Knowledge - Take a look at the [Ethereum Explorer Tutorial](https://substreams.streamingfast.io/tutorials/evm) to learn about the basic transformations you can create with Substreams. 
- **Multi-Sink Support**: Subgraph, Postgres database, Clickhouse, Mongo database
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/es/sps/triggers-example.mdx b/website/pages/es/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/es/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source:
The example below demonstrates how to extract the non-derived transfers associated with the Orca account ID into subgraph entities:
let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1.
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/fr/sps/_meta.js b/website/pages/fr/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/fr/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/fr/sps/introduction.mdx b/website/pages/fr/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/fr/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/fr/sps/triggers-example.mdx b/website/pages/fr/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/fr/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source:
The example below demonstrates how to extract the non-derived transfers associated with the Orca account ID into subgraph entities:
let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1.
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/ha/sps/_meta.js b/website/pages/ha/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/ha/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/ha/sps/introduction.mdx b/website/pages/ha/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/ha/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/ha/sps/triggers-example.mdx b/website/pages/ha/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/ha/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source:
The example below demonstrates how to extract the non-derived transfers associated with the Orca account ID into subgraph entities:
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/ha/sps/triggers.mdx b/website/pages/ha/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/ha/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/hi/sps/_meta.js b/website/pages/hi/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/hi/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/hi/sps/introduction.mdx b/website/pages/hi/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/hi/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/hi/sps/triggers-example.mdx b/website/pages/hi/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/hi/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/hi/sps/triggers.mdx b/website/pages/hi/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/hi/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/it/sps/_meta.js b/website/pages/it/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/it/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/it/sps/introduction.mdx b/website/pages/it/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/it/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/it/sps/triggers-example.mdx b/website/pages/it/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/it/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/it/sps/triggers.mdx b/website/pages/it/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/it/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/ja/sps/_meta.js b/website/pages/ja/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/ja/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/ja/sps/introduction.mdx b/website/pages/ja/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/ja/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/ja/sps/triggers-example.mdx b/website/pages/ja/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/ja/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/ja/sps/triggers.mdx b/website/pages/ja/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/ja/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/ko/sps/_meta.js b/website/pages/ko/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/ko/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/ko/sps/introduction.mdx b/website/pages/ko/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/ko/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/ko/sps/triggers-example.mdx b/website/pages/ko/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/ko/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/ko/sps/triggers.mdx b/website/pages/ko/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/ko/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/mr/sps/_meta.js b/website/pages/mr/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/mr/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/mr/sps/introduction.mdx b/website/pages/mr/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/mr/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/mr/sps/triggers-example.mdx b/website/pages/mr/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/mr/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/mr/sps/triggers.mdx b/website/pages/mr/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/mr/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/nl/sps/_meta.js b/website/pages/nl/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/nl/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/nl/sps/introduction.mdx b/website/pages/nl/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/nl/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/nl/sps/triggers-example.mdx b/website/pages/nl/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/nl/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/nl/sps/triggers.mdx b/website/pages/nl/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/nl/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/pl/sps/_meta.js b/website/pages/pl/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/pl/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/pl/sps/introduction.mdx b/website/pages/pl/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/pl/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/pl/sps/triggers-example.mdx b/website/pages/pl/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/pl/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/pl/sps/triggers.mdx b/website/pages/pl/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/pl/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/pt/sps/_meta.js b/website/pages/pt/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/pt/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/pt/sps/introduction.mdx b/website/pages/pt/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/pt/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/pt/sps/triggers-example.mdx b/website/pages/pt/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/pt/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a`subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+
+For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/pt/sps/triggers.mdx b/website/pages/pt/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/pt/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/ro/sps/_meta.js b/website/pages/ro/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/ro/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/ro/sps/introduction.mdx b/website/pages/ro/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/ro/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/ro/sps/triggers-example.mdx b/website/pages/ro/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/ro/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a`subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+
+For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/ro/sps/triggers.mdx b/website/pages/ro/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/ro/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/ru/sps/_meta.js b/website/pages/ru/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/ru/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/ru/sps/introduction.mdx b/website/pages/ru/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/ru/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/ru/sps/triggers-example.mdx b/website/pages/ru/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/ru/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a`subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+
+For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/ru/sps/triggers.mdx b/website/pages/ru/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/ru/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/sv/sps/_meta.js b/website/pages/sv/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/sv/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/sv/sps/introduction.mdx b/website/pages/sv/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/sv/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/sv/sps/triggers-example.mdx b/website/pages/sv/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/sv/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a`subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+
+For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/sv/sps/triggers.mdx b/website/pages/sv/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/sv/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/tr/sps/_meta.js b/website/pages/tr/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/tr/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/tr/sps/introduction.mdx b/website/pages/tr/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/tr/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/tr/sps/triggers-example.mdx b/website/pages/tr/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/tr/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID:
+
+```yaml
+specVersion: v0.1.0
+package:
+  name: my_project_sol
+  version: v0.1.0
+
+imports: # Pass your spkg of interest
+  solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg
+
+modules:
+  - name: map_spl_transfers
+    use: solana:map_block # Select corresponding modules available within your spkg
+    initialBlock: 260000082
+
+  - name: map_transactions_by_programid
+    use: solana:solana:transactions_by_programid_without_votes
+
+network: solana-mainnet-beta
+
+params: # Modify the param fields to meet your needs
+  # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA
+  map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE
+```
+
+## Step 2: Generate the Subgraph Manifest
+
+Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container:
+
+```bash
+substreams codegen subgraph
+```
+
+You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source:
+
+```yaml
+---
+dataSources:
+  - kind: substreams
+    name: my_project_sol
+    network: solana-mainnet-beta
+    source:
+      package:
+        moduleName: map_spl_transfers # Module defined in the substreams.yaml
+        file: ./my-project-sol-v0.1.0.spkg
+    mapping:
+      apiVersion: 0.0.7
+      kind: substreams/graph-entities
+      file: ./src/mappings.ts
+      handler: handleTriggers
+```
+
+## Step 3: Define Entities in `schema.graphql`
+
+Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example:
+
+```graphql
+type MyTransfer @entity {
+  id: ID!
+  amount: String!
+  source: String!
+  designation: String!
+  signers: [String!]!
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+
+For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana).
diff --git a/website/pages/tr/sps/triggers.mdx b/website/pages/tr/sps/triggers.mdx
new file mode 100644
index 000000000000..ed19635d4768
--- /dev/null
+++ b/website/pages/tr/sps/triggers.mdx
@@ -0,0 +1,37 @@
+---
+title: Substreams Triggers
+---
+
+Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework.
+
+> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container.
+
+The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created.
+
+```tsx
+export function handleTransactions(bytes: Uint8Array): void {
+  let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1.
+  if (transactions.length == 0) {
+    log.info('No transactions found', [])
+    return
+  }
+
+  for (let i = 0; i < transactions.length; i++) {
+    // 2.
+    let transaction = transactions[i]
+
+    let entity = new Transaction(transaction.hash) // 3.
+    entity.from = transaction.from
+    entity.to = transaction.to
+    entity.save()
+  }
+}
+```
+
+Here's what you’re seeing in the `mappings.ts` file:
+
+1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object
+2. Looping over the transactions
+3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/uk/sps/_meta.js b/website/pages/uk/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/uk/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/uk/sps/introduction.mdx b/website/pages/uk/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/uk/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/uk/sps/triggers-example.mdx b/website/pages/uk/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/uk/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID:
+
+```yaml
+specVersion: v0.1.0
+package:
+  name: my_project_sol
+  version: v0.1.0
+
+imports: # Pass your spkg of interest
+  solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg
+
+modules:
+  - name: map_spl_transfers
+    use: solana:map_block # Select corresponding modules available within your spkg
+    initialBlock: 260000082
+
+  - name: map_transactions_by_programid
+    use: solana:solana:transactions_by_programid_without_votes
+
+network: solana-mainnet-beta
+
+params: # Modify the param fields to meet your needs
+  # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA
+  map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE
+```
+
+## Step 2: Generate the Subgraph Manifest
+
+Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container:
+
+```bash
+substreams codegen subgraph
+```
+
+You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source:
+
+```yaml
+---
+dataSources:
+  - kind: substreams
+    name: my_project_sol
+    network: solana-mainnet-beta
+    source:
+      package:
+        moduleName: map_spl_transfers # Module defined in the substreams.yaml
+        file: ./my-project-sol-v0.1.0.spkg
+    mapping:
+      apiVersion: 0.0.7
+      kind: substreams/graph-entities
+      file: ./src/mappings.ts
+      handler: handleTriggers
+```
+
+## Step 3: Define Entities in `schema.graphql`
+
+Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example:
+
+```graphql
+type MyTransfer @entity {
+  id: ID!
+  amount: String!
+  source: String!
+  designation: String!
+  signers: [String!]!
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+
+For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana).
diff --git a/website/pages/uk/sps/triggers.mdx b/website/pages/uk/sps/triggers.mdx
new file mode 100644
index 000000000000..ed19635d4768
--- /dev/null
+++ b/website/pages/uk/sps/triggers.mdx
@@ -0,0 +1,37 @@
+---
+title: Substreams Triggers
+---
+
+Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework.
+
+> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container.
+
+The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created.
+
+```tsx
+export function handleTransactions(bytes: Uint8Array): void {
+  let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1.
+  if (transactions.length == 0) {
+    log.info('No transactions found', [])
+    return
+  }
+
+  for (let i = 0; i < transactions.length; i++) {
+    // 2.
+    let transaction = transactions[i]
+
+    let entity = new Transaction(transaction.hash) // 3.
+    entity.from = transaction.from
+    entity.to = transaction.to
+    entity.save()
+  }
+}
+```
+
+Here's what you’re seeing in the `mappings.ts` file:
+
+1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object
+2. Looping over the transactions
+3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/ur/sps/_meta.js b/website/pages/ur/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/ur/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/ur/sps/introduction.mdx b/website/pages/ur/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/ur/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/ur/sps/triggers-example.mdx b/website/pages/ur/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/ur/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID:
+
+```yaml
+specVersion: v0.1.0
+package:
+  name: my_project_sol
+  version: v0.1.0
+
+imports: # Pass your spkg of interest
+  solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg
+
+modules:
+  - name: map_spl_transfers
+    use: solana:map_block # Select corresponding modules available within your spkg
+    initialBlock: 260000082
+
+  - name: map_transactions_by_programid
+    use: solana:solana:transactions_by_programid_without_votes
+
+network: solana-mainnet-beta
+
+params: # Modify the param fields to meet your needs
+  # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA
+  map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE
+```
+
+## Step 2: Generate the Subgraph Manifest
+
+Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container:
+
+```bash
+substreams codegen subgraph
+```
+
+You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source:
+
+```yaml
+---
+dataSources:
+  - kind: substreams
+    name: my_project_sol
+    network: solana-mainnet-beta
+    source:
+      package:
+        moduleName: map_spl_transfers # Module defined in the substreams.yaml
+        file: ./my-project-sol-v0.1.0.spkg
+    mapping:
+      apiVersion: 0.0.7
+      kind: substreams/graph-entities
+      file: ./src/mappings.ts
+      handler: handleTriggers
+```
+
+## Step 3: Define Entities in `schema.graphql`
+
+Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example:
+
+```graphql
+type MyTransfer @entity {
+  id: ID!
+  amount: String!
+  source: String!
+  designation: String!
+  signers: [String!]!
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+
+For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana).
diff --git a/website/pages/ur/sps/triggers.mdx b/website/pages/ur/sps/triggers.mdx
new file mode 100644
index 000000000000..ed19635d4768
--- /dev/null
+++ b/website/pages/ur/sps/triggers.mdx
@@ -0,0 +1,37 @@
+---
+title: Substreams Triggers
+---
+
+Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework.
+
+> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container.
+
+The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created.
+
+```tsx
+export function handleTransactions(bytes: Uint8Array): void {
+  let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1.
+  if (transactions.length == 0) {
+    log.info('No transactions found', [])
+    return
+  }
+
+  for (let i = 0; i < transactions.length; i++) {
+    // 2.
+    let transaction = transactions[i]
+
+    let entity = new Transaction(transaction.hash) // 3.
+    entity.from = transaction.from
+    entity.to = transaction.to
+    entity.save()
+  }
+}
+```
+
+Here's what you’re seeing in the `mappings.ts` file:
+
+1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object
+2. Looping over the transactions
+3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/vi/sps/_meta.js b/website/pages/vi/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/vi/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/vi/sps/introduction.mdx b/website/pages/vi/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/vi/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/vi/sps/triggers-example.mdx b/website/pages/vi/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/vi/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID:
+
+```yaml
+specVersion: v0.1.0
+package:
+  name: my_project_sol
+  version: v0.1.0
+
+imports: # Pass your spkg of interest
+  solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg
+
+modules:
+  - name: map_spl_transfers
+    use: solana:map_block # Select corresponding modules available within your spkg
+    initialBlock: 260000082
+
+  - name: map_transactions_by_programid
+    use: solana:solana:transactions_by_programid_without_votes
+
+network: solana-mainnet-beta
+
+params: # Modify the param fields to meet your needs
+  # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA
+  map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE
+```
+
+## Step 2: Generate the Subgraph Manifest
+
+Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container:
+
+```bash
+substreams codegen subgraph
+```
+
+You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source:
+
+```yaml
+---
+dataSources:
+  - kind: substreams
+    name: my_project_sol
+    network: solana-mainnet-beta
+    source:
+      package:
+        moduleName: map_spl_transfers # Module defined in the substreams.yaml
+        file: ./my-project-sol-v0.1.0.spkg
+    mapping:
+      apiVersion: 0.0.7
+      kind: substreams/graph-entities
+      file: ./src/mappings.ts
+      handler: handleTriggers
+```
+
+## Step 3: Define Entities in `schema.graphql`
+
+Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example:
+
+```graphql
+type MyTransfer @entity {
+  id: ID!
+  amount: String!
+  source: String!
+  designation: String!
+  signers: [String!]!
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/vi/sps/triggers.mdx b/website/pages/vi/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/vi/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/yo/sps/_meta.js b/website/pages/yo/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/yo/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/yo/sps/introduction.mdx b/website/pages/yo/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/yo/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/yo/sps/triggers-example.mdx b/website/pages/yo/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/yo/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/yo/sps/triggers.mdx b/website/pages/yo/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/yo/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/pages/zh/sps/_meta.js b/website/pages/zh/sps/_meta.js new file mode 100644 index 000000000000..4ebd7d55a84f --- /dev/null +++ b/website/pages/zh/sps/_meta.js @@ -0,0 +1,5 @@ +import meta from '../../en/sps/_meta.js' + +export default { + ...meta, +} diff --git a/website/pages/zh/sps/introduction.mdx b/website/pages/zh/sps/introduction.mdx new file mode 100644 index 000000000000..3e50521589af --- /dev/null +++ b/website/pages/zh/sps/introduction.mdx @@ -0,0 +1,19 @@ +--- +title: Introduction to Substreams-powered Subgraphs +--- + +By using a Substreams package (`.spkg`) as a data source, your subgraph gains access to a stream of pre-indexed blockchain data. This enables more efficient and scalable data handling, especially with large or complex blockchain networks. + +There are two methods of enabling this technology: + +Using Substreams [triggers](./triggers): Consume from any Substreams module by importing the Protobuf model through a subgraph handler and move all your logic into a subgraph. This method creates the subgraph entities directly in the subgraph. + +Using [Entity Changes](https://substreams.streamingfast.io/documentation/consume/subgraph/graph-out): By writing more of the logic into Substreams, you can consume the module's output directly into graph-node. In graph-node, you can use the Substreams data to create your subgraph entities. + +It is really a matter of where you put your logic, in the subgraph or the Substreams. Keep in mind that having more of your logic in Substreams benefits from a parallelized model, whereas triggers will be linearly consumed in graph-node. 
+ +Visit the following links for How-To Guides on using code-generation tooling to build your first end-to-end project quickly: + +- [Solana](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/solana) +- [EVM](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/evm) +- [Injective](https://substreams.streamingfast.io/documentation/how-to-guides/intro-your-first-application/injective) diff --git a/website/pages/zh/sps/triggers-example.mdx b/website/pages/zh/sps/triggers-example.mdx new file mode 100644 index 000000000000..d8d61566295e --- /dev/null +++ b/website/pages/zh/sps/triggers-example.mdx @@ -0,0 +1,137 @@ +--- +title: 'Tutorial: Set Up a Substreams-Powered Subgraph on Solana' +--- + +## Prerequisites + +Before starting, make sure to: + +- Complete the [Getting Started Guide](https://github.com/streamingfast/substreams-starter) to set up your development environment using a Dev Container. +- Be familiar with The Graph and basic blockchain concepts such as transactions and Protobufs. + +## Step 1: Initialize Your Project + +1. Open your Dev Container and run the following command to initialize your project: + + ```bash + substreams init + ``` + +2. Select the "minimal" project option. +3. 
Replace the contents of the generated `substreams.yaml` file with the following configuration, which filters transactions for the Orca account on the SPL token program ID: + +```yaml +specVersion: v0.1.0 +package: + name: my_project_sol + version: v0.1.0 + +imports: # Pass your spkg of interest + solana: https://github.com/streamingfast/substreams-solana-spl-token/raw/master/tokens/solana-spl-token-v0.1.0.spkg + +modules: + - name: map_spl_transfers + use: solana:map_block # Select corresponding modules available within your spkg + initialBlock: 260000082 + + - name: map_transactions_by_programid + use: solana:solana:transactions_by_programid_without_votes + +network: solana-mainnet-beta + +params: # Modify the param fields to meet your needs + # For program_id: TokenkegQfeZyiNwAJbNbGKPFXCWuBvf9Ss623VQ5DA + map_spl_transfers: token_contract:orcaEKTdK7LKz57vaAYr9QeNsVEPfiu6QeMU1kektZE +``` + +## Step 2: Generate the Subgraph Manifest + +Once the project is initialized, generate a subgraph manifest by running the following command in the Dev Container: + +```bash +substreams codegen subgraph +``` + +You will generate a `subgraph.yaml` manifest which imports the Substreams package as a data source: + +```yaml +--- +dataSources: + - kind: substreams + name: my_project_sol + network: solana-mainnet-beta + source: + package: + moduleName: map_spl_transfers # Module defined in the substreams.yaml + file: ./my-project-sol-v0.1.0.spkg + mapping: + apiVersion: 0.0.7 + kind: substreams/graph-entities + file: ./src/mappings.ts + handler: handleTriggers +``` + +## Step 3: Define Entities in `schema.graphql` + +Define the fields you want to save in your subgraph entities by updating the `schema.graphql` file. Here is an example: + +```graphql +type MyTransfer @entity { + id: ID! + amount: String! + source: String! + designation: String! + signers: [String!]! 
+} +``` + +This schema defines a `MyTransfer` entity with fields such as `id`, `amount`, `source`, `designation`, and `signers`. + +## Step 4: Generate Protobuf Files + +To generate Protobuf objects in AssemblyScript, run the following command: + +```bash +npm run protogen +``` + +This command converts the Protobuf definitions into AssemblyScript, allowing you to use them in the subgraph's handler. + +## Step 5: Handle Substreams Data in `mappings.ts` + +With the Protobuf objects generated, you can now handle the decoded Substreams data in your `mappings.ts` file found in the `./src` directory. The example below demonstrates how to extract to subgraph entities the non-derived transfers associated to the Orca account id: + +```ts +import { Protobuf } from 'as-proto/assembly' +import { Events as protoEvents } from './pb/sf/solana/spl/token/v1/Events' +import { MyTransfer } from '../generated/schema' + +export function handleTriggers(bytes: Uint8Array): void { + const input: protoEvents = Protobuf.decode(bytes, protoEvents.decode) + + for (let i = 0; i < input.data.length; i++) { + const event = input.data[i] + + if (event.transfer != null) { + let entity_id: string = `${event.txnId}-${i}` + const entity = new MyTransfer(entity_id) + entity.amount = event.transfer!.instruction!.amount.toString() + entity.source = event.transfer!.accounts!.source + entity.designation = event.transfer!.accounts!.destination + + if (event.transfer!.accounts!.signer!.single != null) { + entity.signers = [event.transfer!.accounts!.signer!.single.signer] + } else if (event.transfer!.accounts!.signer!.multisig != null) { + entity.signers = event.transfer!.accounts!.signer!.multisig!.signers + } + entity.save() + } + } +} +``` + +## Conclusion + +You’ve successfully set up a trigger-based Substreams-powered subgraph for a Solana SPL token. You can now further customize your schema, mappings, and modules to suit your specific use case. 
+ +For more advanced customization and optimizations, check out the official [Substreams documentation](https://substreams.streamingfast.io/tutorials/solana). diff --git a/website/pages/zh/sps/triggers.mdx b/website/pages/zh/sps/triggers.mdx new file mode 100644 index 000000000000..ed19635d4768 --- /dev/null +++ b/website/pages/zh/sps/triggers.mdx @@ -0,0 +1,37 @@ +--- +title: Substreams Triggers +--- + +Custom triggers allow you to send data directly into your subgraph mappings file and entities (similar to tables and fields), enabling full use of the GraphQL layer. By importing the Protobuf definitions emitted by your Substreams module, you can receive and process this data within your subgraph’s handler, ensuring efficient and streamlined data management within the subgraph framework. + +> Note: If you haven’t already, visit one of the How-To Guides found [here](./introduction) to scaffold your first project in the Development Container. + +The following code demonstrates how to define a `handleTransactions` function in a subgraph handler. This function receives raw Substreams bytes as a parameter and decodes them into a `Transactions` object. For each transaction, a new subgraph entity is created. + +```tsx +export function handleTransactions(bytes: Uint8Array): void { + let transactions = assembly.eth.transaction.v1.Transactions.decode(bytes.buffer).transactions // 1. + if (transactions.length == 0) { + log.info('No transactions found', []) + return + } + + for (let i = 0; i < transactions.length; i++) { + // 2. + let transaction = transactions[i] + + let entity = new Transaction(transaction.hash) // 3. + entity.from = transaction.from + entity.to = transaction.to + entity.save() + } +} +``` + +Here's what you’re seeing in the `mappings.ts` file: + +1. The bytes containing Substreams data are decoded into the generated `Transactions` object, this object is used like any other AssemblyScript object +2. Looping over the transactions +3. 
Create a new subgraph entity for every transaction + +To go through a detailed example of a trigger-based subgraph, [click here](./triggers-example). diff --git a/website/route-lockfile.txt b/website/route-lockfile.txt index ac7f9bf98564..37beda87e6f5 100644 --- a/website/route-lockfile.txt +++ b/website/route-lockfile.txt @@ -60,9 +60,13 @@ /ar/quick-start/ /ar/release-notes/assemblyscript-migration-guide/ /ar/release-notes/graphql-validations-migration-guide/ +/ar/sps/introduction/ +/ar/sps/triggers-example/ +/ar/sps/triggers/ /ar/substreams/ /ar/sunrise/ /ar/supported-network-requirements/ +/ar/tap/ /ar/tokenomics/ /cs/ /cs/404/ @@ -125,9 +129,13 @@ /cs/quick-start/ /cs/release-notes/assemblyscript-migration-guide/ /cs/release-notes/graphql-validations-migration-guide/ +/cs/sps/introduction/ +/cs/sps/triggers-example/ +/cs/sps/triggers/ /cs/substreams/ /cs/sunrise/ /cs/supported-network-requirements/ +/cs/tap/ /cs/tokenomics/ /de/about/ /de/arbitrum/arbitrum-faq/ @@ -188,14 +196,17 @@ /de/quick-start/ /de/release-notes/assemblyscript-migration-guide/ /de/release-notes/graphql-validations-migration-guide/ +/de/sps/introduction/ +/de/sps/triggers-example/ +/de/sps/triggers/ /de/substreams/ /de/sunrise/ /de/supported-network-requirements/ +/de/tap/ /de/tokenomics/ /en/ /en/404/ /en/about/ -/en/arbitrum-faq/ -> /en/arbitrum/arbitrum-faq/ /en/arbitrum/arbitrum-faq/ /en/arbitrum/l2-transfer-tools-faq/ /en/arbitrum/l2-transfer-tools-guide/ @@ -203,7 +214,6 @@ /en/chain-integration-overview/ /en/cookbook/arweave/ /en/cookbook/avoid-eth-calls/ -/en/cookbook/base-testnet/ /en/cookbook/cosmos/ /en/cookbook/derivedfrom/ /en/cookbook/grafting/ @@ -214,15 +224,11 @@ /en/cookbook/subgraph-debug-forking/ /en/cookbook/subgraph-uncrashable/ /en/cookbook/substreams-powered-subgraphs/ -/en/cookbook/upgrading-a-subgraph/ /en/deploying/deploying-a-subgraph-to-studio/ /en/deploying/subgraph-studio-faqs/ /en/deploying/subgraph-studio/ -/en/developer/assemblyscript-api/ -> 
/en/developing/graph-ts/api/ -/en/developing/assemblyscript-api/ -> /en/developing/graph-ts/api/ /en/developing/creating-a-subgraph/ /en/developing/developer-faqs/ -/en/developing/graph-ts/ -> /en/developing/graph-ts/README/ /en/developing/graph-ts/CHANGELOG/ /en/developing/graph-ts/README/ /en/developing/graph-ts/api/ @@ -247,7 +253,6 @@ /en/operating-graph-node/ /en/publishing/publishing-a-subgraph/ /en/querying/distributed-systems/ -/en/querying/graph-client/ -> /en/querying/graph-client/README/ /en/querying/graph-client/README/ /en/querying/graph-client/architecture/ /en/querying/graph-client/live/ @@ -261,9 +266,13 @@ /en/quick-start/ /en/release-notes/assemblyscript-migration-guide/ /en/release-notes/graphql-validations-migration-guide/ +/en/sps/introduction/ +/en/sps/triggers-example/ +/en/sps/triggers/ /en/substreams/ /en/sunrise/ /en/supported-network-requirements/ +/en/tap/ /en/tokenomics/ /es/ /es/404/ @@ -326,9 +335,13 @@ /es/quick-start/ /es/release-notes/assemblyscript-migration-guide/ /es/release-notes/graphql-validations-migration-guide/ +/es/sps/introduction/ +/es/sps/triggers-example/ +/es/sps/triggers/ /es/substreams/ /es/sunrise/ /es/supported-network-requirements/ +/es/tap/ /es/tokenomics/ /fr/ /fr/404/ @@ -391,9 +404,13 @@ /fr/quick-start/ /fr/release-notes/assemblyscript-migration-guide/ /fr/release-notes/graphql-validations-migration-guide/ +/fr/sps/introduction/ +/fr/sps/triggers-example/ +/fr/sps/triggers/ /fr/substreams/ /fr/sunrise/ /fr/supported-network-requirements/ +/fr/tap/ /fr/tokenomics/ /ha/about/ /ha/arbitrum/arbitrum-faq/ @@ -448,9 +465,13 @@ /ha/quick-start/ /ha/release-notes/assemblyscript-migration-guide/ /ha/release-notes/graphql-validations-migration-guide/ +/ha/sps/introduction/ +/ha/sps/triggers-example/ +/ha/sps/triggers/ /ha/substreams/ /ha/sunrise/ /ha/supported-network-requirements/ +/ha/tap/ /ha/tokenomics/ /hi/ /hi/404/ @@ -513,9 +534,13 @@ /hi/quick-start/ /hi/release-notes/assemblyscript-migration-guide/ 
/hi/release-notes/graphql-validations-migration-guide/ +/hi/sps/introduction/ +/hi/sps/triggers-example/ +/hi/sps/triggers/ /hi/substreams/ /hi/sunrise/ /hi/supported-network-requirements/ +/hi/tap/ /hi/tokenomics/ /it/ /it/404/ @@ -578,9 +603,13 @@ /it/quick-start/ /it/release-notes/assemblyscript-migration-guide/ /it/release-notes/graphql-validations-migration-guide/ +/it/sps/introduction/ +/it/sps/triggers-example/ +/it/sps/triggers/ /it/substreams/ /it/sunrise/ /it/supported-network-requirements/ +/it/tap/ /it/tokenomics/ /ja/ /ja/404/ @@ -643,9 +672,13 @@ /ja/quick-start/ /ja/release-notes/assemblyscript-migration-guide/ /ja/release-notes/graphql-validations-migration-guide/ +/ja/sps/introduction/ +/ja/sps/triggers-example/ +/ja/sps/triggers/ /ja/substreams/ /ja/sunrise/ /ja/supported-network-requirements/ +/ja/tap/ /ja/tokenomics/ /ko/about/ /ko/arbitrum/arbitrum-faq/ @@ -706,9 +739,13 @@ /ko/quick-start/ /ko/release-notes/assemblyscript-migration-guide/ /ko/release-notes/graphql-validations-migration-guide/ +/ko/sps/introduction/ +/ko/sps/triggers-example/ +/ko/sps/triggers/ /ko/substreams/ /ko/sunrise/ /ko/supported-network-requirements/ +/ko/tap/ /ko/tokenomics/ /mr/ /mr/404/ @@ -771,9 +808,13 @@ /mr/quick-start/ /mr/release-notes/assemblyscript-migration-guide/ /mr/release-notes/graphql-validations-migration-guide/ +/mr/sps/introduction/ +/mr/sps/triggers-example/ +/mr/sps/triggers/ /mr/substreams/ /mr/sunrise/ /mr/supported-network-requirements/ +/mr/tap/ /mr/tokenomics/ /nl/about/ /nl/arbitrum/arbitrum-faq/ @@ -834,9 +875,13 @@ /nl/quick-start/ /nl/release-notes/assemblyscript-migration-guide/ /nl/release-notes/graphql-validations-migration-guide/ +/nl/sps/introduction/ +/nl/sps/triggers-example/ +/nl/sps/triggers/ /nl/substreams/ /nl/sunrise/ /nl/supported-network-requirements/ +/nl/tap/ /nl/tokenomics/ /pl/about/ /pl/arbitrum/arbitrum-faq/ @@ -897,9 +942,13 @@ /pl/quick-start/ /pl/release-notes/assemblyscript-migration-guide/ 
/pl/release-notes/graphql-validations-migration-guide/ +/pl/sps/introduction/ +/pl/sps/triggers-example/ +/pl/sps/triggers/ /pl/substreams/ /pl/sunrise/ /pl/supported-network-requirements/ +/pl/tap/ /pl/tokenomics/ /pt/ /pt/404/ @@ -962,9 +1011,13 @@ /pt/quick-start/ /pt/release-notes/assemblyscript-migration-guide/ /pt/release-notes/graphql-validations-migration-guide/ +/pt/sps/introduction/ +/pt/sps/triggers-example/ +/pt/sps/triggers/ /pt/substreams/ /pt/sunrise/ /pt/supported-network-requirements/ +/pt/tap/ /pt/tokenomics/ /ro/about/ /ro/arbitrum/arbitrum-faq/ @@ -1025,9 +1078,13 @@ /ro/quick-start/ /ro/release-notes/assemblyscript-migration-guide/ /ro/release-notes/graphql-validations-migration-guide/ +/ro/sps/introduction/ +/ro/sps/triggers-example/ +/ro/sps/triggers/ /ro/substreams/ /ro/sunrise/ /ro/supported-network-requirements/ +/ro/tap/ /ro/tokenomics/ /ru/ /ru/404/ @@ -1090,9 +1147,13 @@ /ru/quick-start/ /ru/release-notes/assemblyscript-migration-guide/ /ru/release-notes/graphql-validations-migration-guide/ +/ru/sps/introduction/ +/ru/sps/triggers-example/ +/ru/sps/triggers/ /ru/substreams/ /ru/sunrise/ /ru/supported-network-requirements/ +/ru/tap/ /ru/tokenomics/ /sv/ /sv/404/ @@ -1155,9 +1216,13 @@ /sv/quick-start/ /sv/release-notes/assemblyscript-migration-guide/ /sv/release-notes/graphql-validations-migration-guide/ +/sv/sps/introduction/ +/sv/sps/triggers-example/ +/sv/sps/triggers/ /sv/substreams/ /sv/sunrise/ /sv/supported-network-requirements/ +/sv/tap/ /sv/tokenomics/ /tr/ /tr/404/ @@ -1220,9 +1285,13 @@ /tr/quick-start/ /tr/release-notes/assemblyscript-migration-guide/ /tr/release-notes/graphql-validations-migration-guide/ +/tr/sps/introduction/ +/tr/sps/triggers-example/ +/tr/sps/triggers/ /tr/substreams/ /tr/sunrise/ /tr/supported-network-requirements/ +/tr/tap/ /tr/tokenomics/ /uk/about/ /uk/arbitrum/arbitrum-faq/ @@ -1283,9 +1352,13 @@ /uk/quick-start/ /uk/release-notes/assemblyscript-migration-guide/ 
/uk/release-notes/graphql-validations-migration-guide/ +/uk/sps/introduction/ +/uk/sps/triggers-example/ +/uk/sps/triggers/ /uk/substreams/ /uk/sunrise/ /uk/supported-network-requirements/ +/uk/tap/ /uk/tokenomics/ /ur/ /ur/404/ @@ -1348,9 +1421,13 @@ /ur/quick-start/ /ur/release-notes/assemblyscript-migration-guide/ /ur/release-notes/graphql-validations-migration-guide/ +/ur/sps/introduction/ +/ur/sps/triggers-example/ +/ur/sps/triggers/ /ur/substreams/ /ur/sunrise/ /ur/supported-network-requirements/ +/ur/tap/ /ur/tokenomics/ /vi/about/ /vi/arbitrum/arbitrum-faq/ @@ -1411,9 +1488,13 @@ /vi/quick-start/ /vi/release-notes/assemblyscript-migration-guide/ /vi/release-notes/graphql-validations-migration-guide/ +/vi/sps/introduction/ +/vi/sps/triggers-example/ +/vi/sps/triggers/ /vi/substreams/ /vi/sunrise/ /vi/supported-network-requirements/ +/vi/tap/ /vi/tokenomics/ /yo/about/ /yo/arbitrum/arbitrum-faq/ @@ -1474,9 +1555,13 @@ /yo/quick-start/ /yo/release-notes/assemblyscript-migration-guide/ /yo/release-notes/graphql-validations-migration-guide/ +/yo/sps/introduction/ +/yo/sps/triggers-example/ +/yo/sps/triggers/ /yo/substreams/ /yo/sunrise/ /yo/supported-network-requirements/ +/yo/tap/ /yo/tokenomics/ /zh/ /zh/404/ @@ -1539,7 +1624,11 @@ /zh/quick-start/ /zh/release-notes/assemblyscript-migration-guide/ /zh/release-notes/graphql-validations-migration-guide/ +/zh/sps/introduction/ +/zh/sps/triggers-example/ +/zh/sps/triggers/ /zh/substreams/ /zh/sunrise/ /zh/supported-network-requirements/ +/zh/tap/ /zh/tokenomics/