v2.16.0 #523

Merged Jan 7, 2025 (21 commits)

Commits
9c1659d  SN1-362 fix issues with scoring endpoint (#507) (Hollyqui, Dec 19, 2024)
b0d8e85  Add Mixture-of-Miners endpoint (#472) (dbobrenko, Dec 20, 2024)
472ba83  Revert debug mode (dbobrenko, Dec 20, 2024)
0194a5d  Fixing task reproducibility (Hollyqui, Dec 20, 2024)
3dbab91  Fix formatting (#513) (dbobrenko, Dec 21, 2024)
6b2e6ba  Improving docs, fixing settings (Hollyqui, Dec 23, 2024)
f44d62c  Improving docs, fixing settings (#514) (Hollyqui, Dec 23, 2024)
9e99288  Merge branch 'staging' of https://github.com/macrocosm-os/prompting i… (Hollyqui, Dec 23, 2024)
7753f1a  Changing transformers version (Hollyqui, Dec 23, 2024)
4c75c4a  Adding API key checking (Hollyqui, Dec 23, 2024)
8005f03  Merge branch 'main' into hotfix/check-api-key (Hollyqui, Dec 23, 2024)
9977853  run precommit fix (richwardle, Jan 2, 2025)
4e560b1  Merge branch 'main' into merge-main-into-staging (bkb2135, Jan 2, 2025)
0036843  Merge pull request #522 from macrocosm-os/merge-main-into-staging (bkb2135, Jan 2, 2025)
1454af0  Merge pull request #516 from macrocosm-os/hotfix/check-api-key (bkb2135, Jan 2, 2025)
dbeaa29  Merge pull request #511 from macrocosm-os/feature/SN1-368-clean (bkb2135, Jan 2, 2025)
ebb4f85  SN1-361: Query multiple miners to return fastest response + Scoring b… (Hollyqui, Jan 3, 2025)
667e264  Remove py3.9 from github actions (#527) (Hollyqui, Jan 3, 2025)
4654d3a  Adding web retrieval endpoint (#528) (Hollyqui, Jan 3, 2025)
f58965e  Update miner.py (bkb2135, Jan 7, 2025)
76fb522  Merge pull request #529 from macrocosm-os/bkb2135-patch-1 (bkb2135, Jan 7, 2025)
.github/workflows/python-package.yml (2 changes: 1 addition & 1 deletion)

@@ -12,7 +12,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        python-version: ["3.9", "3.10"]
+        python-version: ["3.10"]
 
     steps:
       - uses: actions/checkout@v3
neurons/miners/epistula_miner/miner.py (34 changes: 17 additions & 17 deletions)

@@ -1,8 +1,8 @@
 # ruff: noqa: E402
-from prompting import settings
+from shared import settings
 
-settings.settings = settings.Settings.load(mode="miner")
-settings = settings.settings
+settings.shared_settings = settings.SharedSettings.load(mode="miner")
+shared_settings = settings.shared_settings
 
 import asyncio
 import json
@@ -39,7 +39,7 @@ def __init__(self):
         self.client = httpx.AsyncClient(
             base_url="https://api.openai.com/v1",
             headers={
-                "Authorization": f"Bearer {settings.OPENAI_API_KEY}",
+                "Authorization": f"Bearer {shared_settings.OPENAI_API_KEY}",
                 "Content-Type": "application/json",
             },
         )
@@ -107,14 +107,14 @@ async def verify_request(
 
         signed_by = request.headers.get("Epistula-Signed-By")
         signed_for = request.headers.get("Epistula-Signed-For")
-        if signed_for != settings.WALLET.hotkey.ss58_address:
+        if signed_for != shared_settings.WALLET.hotkey.ss58_address:
             raise HTTPException(status_code=400, detail="Bad Request, message is not intended for self")
-        if signed_by not in settings.METAGRAPH.hotkeys:
+        if signed_by not in shared_settings.METAGRAPH.hotkeys:
            raise HTTPException(status_code=401, detail="Signer not in metagraph")
 
-        uid = settings.METAGRAPH.hotkeys.index(signed_by)
-        stake = settings.METAGRAPH.S[uid].item()
-        if not settings.NETUID == 61 and stake < 10000:
+        uid = shared_settings.METAGRAPH.hotkeys.index(signed_by)
+        stake = shared_settings.METAGRAPH.S[uid].item()
+        if not shared_settings.NETUID == 61 and stake < 10000:
             logger.warning(f"Blacklisting request from {signed_by} [uid={uid}], not enough stake -- {stake}")
             raise HTTPException(status_code=401, detail="Stake below minimum: {stake}")
 
@@ -133,7 +133,7 @@ async def verify_request(
             raise HTTPException(status_code=400, detail=err)
 
     def run(self):
-        external_ip = None  # settings.EXTERNAL_IP
+        external_ip = None  # shared_settings.EXTERNAL_IP
         if not external_ip or external_ip == "[::]":
             try:
                 external_ip = requests.get("https://checkip.amazonaws.com").text.strip()
@@ -142,16 +142,16 @@ def run(self):
                 logger.error("Failed to get external IP")
 
         logger.info(
-            f"Serving miner endpoint {external_ip}:{settings.AXON_PORT} on network: {settings.SUBTENSOR_NETWORK} with netuid: {settings.NETUID}"
+            f"Serving miner endpoint {external_ip}:{shared_settings.AXON_PORT} on network: {shared_settings.SUBTENSOR_NETWORK} with netuid: {shared_settings.NETUID}"
         )
 
         serve_success = serve_extrinsic(
-            subtensor=settings.SUBTENSOR,
-            wallet=settings.WALLET,
+            subtensor=shared_settings.SUBTENSOR,
+            wallet=shared_settings.WALLET,
             ip=external_ip,
-            port=settings.AXON_PORT,
+            port=shared_settings.AXON_PORT,
             protocol=4,
-            netuid=settings.NETUID,
+            netuid=shared_settings.NETUID,
         )
         if not serve_success:
             logger.error("Failed to serve endpoint")
@@ -174,15 +174,15 @@ def run(self):
         fast_config = uvicorn.Config(
             app,
             host="0.0.0.0",
-            port=settings.AXON_PORT,
+            port=shared_settings.AXON_PORT,
             log_level="info",
             loop="asyncio",
             workers=4,
         )
         self.fast_api = FastAPIThreadedServer(config=fast_config)
         self.fast_api.start()
 
-        logger.info(f"Miner starting at block: {settings.SUBTENSOR.block}")
+        logger.info(f"Miner starting at block: {shared_settings.SUBTENSOR.block}")
 
         # Main execution loop.
         try:
neurons/test_vanilla_post.py (6 changes: 3 additions & 3 deletions)

@@ -1,10 +1,10 @@
 import openai
 from httpx import Timeout
 
-from prompting import settings
+from shared import settings
 
-settings.settings = settings.Settings.load(mode="validator")
-settings = settings.settings
+settings.shared_settings = settings.SharedSettings.load(mode="validator")
+shared_settings = settings.shared_settings
 
 from shared.epistula import create_header_hook
neurons/validator.py (7 changes: 4 additions & 3 deletions)

@@ -81,11 +81,12 @@ async def spawn_loops(task_queue, scoring_queue, reward_events):
     asyncio.run(spawn_loops(task_queue, scoring_queue, reward_events))
 
 
-def start_api():
+def start_api(scoring_queue, reward_events):
     async def start():
         from prompting.api.api import start_scoring_api  # noqa: F401
 
-        await start_scoring_api()
+        await start_scoring_api(scoring_queue, reward_events)
+
         while True:
             await asyncio.sleep(10)
             logger.debug("Running API...")
@@ -125,7 +126,7 @@ async def main():
 
     if shared_settings.DEPLOY_SCORING_API:
         # Use multiprocessing to bypass API blocking issue
-        api_process = mp.Process(target=start_api, name="API_Process")
+        api_process = mp.Process(target=start_api, args=(scoring_queue, reward_events), name="API_Process")
         api_process.start()
         processes.append(api_process)
 
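Since the API now runs in a child process, the two queues have to be process-safe objects created before the process is spawned; plain lists mutated in the validator would not be visible to the API. A minimal sketch of this wiring, assuming manager-backed lists (how validator.py actually constructs the queues is not shown in this diff):

import multiprocessing as mp


def start_api(scoring_queue, reward_events):
    # Stand-in for the start_api in the diff above: the child process hands
    # these proxies to start_scoring_api, so appends made by either process
    # are visible to both.
    print(len(scoring_queue), len(reward_events))


if __name__ == "__main__":
    manager = mp.Manager()
    scoring_queue = manager.list()  # proxied across the process boundary
    reward_events = manager.list()

    api_process = mp.Process(target=start_api, args=(scoring_queue, reward_events), name="API_Process")
    api_process.start()
    api_process.join()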
prompting/api/api.py (5 changes: 4 additions & 1 deletion)

@@ -4,6 +4,7 @@
 
 from prompting.api.miner_availabilities.api import router as miner_availabilities_router
 from prompting.api.scoring.api import router as scoring_router
+from prompting.rewards.scoring import task_scorer
 from shared.settings import shared_settings
 
 app = FastAPI()
@@ -17,7 +18,9 @@ def health():
     return {"status": "healthy"}
 
 
-async def start_scoring_api():
+async def start_scoring_api(scoring_queue, reward_events):
+    task_scorer.scoring_queue = scoring_queue
+    task_scorer.reward_events = reward_events
     logger.info(f"Starting Scoring API on https://0.0.0.0:{shared_settings.SCORING_API_PORT}")
     uvicorn.run(
         "prompting.api.api:app", host="0.0.0.0", port=shared_settings.SCORING_API_PORT, loop="asyncio", reload=False
prompting/api/scoring/api.py (72 changes: 53 additions & 19 deletions)

@@ -4,9 +4,11 @@
 from fastapi import APIRouter, Depends, Header, HTTPException, Request
 from loguru import logger
 
+from prompting.datasets.random_website import DDGDatasetEntry
 from prompting.llms.model_zoo import ModelZoo
 from prompting.rewards.scoring import task_scorer
 from prompting.tasks.inference import InferenceTask
+from prompting.tasks.web_retrieval import WebRetrievalTask
 from shared.base import DatasetEntry
 from shared.dendrite import DendriteResponseEvent
 from shared.epistula import SynapseStreamResult
@@ -37,22 +39,54 @@ async def score_response(request: Request, api_key_data: dict = Depends(validate
     uid = int(payload.get("uid"))
     chunks = payload.get("chunks")
     llm_model = ModelZoo.get_model_by_id(model) if (model := body.get("model")) else None
-    task_scorer.add_to_queue(
-        task=InferenceTask(
-            messages=[msg["content"] for msg in body.get("messages")],
-            llm_model=llm_model,
-            llm_model_id=body.get("model"),
-            seed=int(body.get("seed", 0)),
-            sampling_params=body.get("sampling_params", {}),
-        ),
-        response=DendriteResponseEvent(
-            uids=[uid],
-            stream_results=[SynapseStreamResult(accumulated_chunks=[chunk for chunk in chunks if chunk is not None])],
-            timeout=shared_settings.NEURON_TIMEOUT,
-        ),
-        dataset_entry=DatasetEntry(),
-        block=shared_settings.METAGRAPH.block,
-        step=-1,
-        task_id=str(uuid.uuid4()),
-    )
-    logger.info("Organic tas appended to scoring queue")
+    task = body.get("task")
+    if task == "InferenceTask":
+        logger.info(f"Received Organic InferenceTask with body: {body}")
+        task_scorer.add_to_queue(
+            task=InferenceTask(
+                messages=[msg["content"] for msg in body.get("messages")],
+                llm_model=llm_model,
+                llm_model_id=body.get("model"),
+                seed=int(body.get("seed", 0)),
+                sampling_params=body.get("sampling_params", {}),
+            ),
+            response=DendriteResponseEvent(
+                uids=[uid],
+                stream_results=[
+                    SynapseStreamResult(accumulated_chunks=[chunk for chunk in chunks if chunk is not None])
+                ],
+                timeout=shared_settings.NEURON_TIMEOUT,
+            ),
+            dataset_entry=DatasetEntry(),
+            block=shared_settings.METAGRAPH.block,
+            step=-1,
+            task_id=str(uuid.uuid4()),
+        )
+    elif task == "WebRetrievalTask":
+        logger.info(f"Received Organic WebRetrievalTask with body: {body}")
+        try:
+            search_term = body.get("messages")[0].get("content")
+        except Exception as ex:
+            logger.error(f"Failed to get search term from messages: {ex}, can't score WebRetrievalTask")
+            return
+
+        task_scorer.add_to_queue(
+            task=WebRetrievalTask(
+                messages=[msg["content"] for msg in body.get("messages")],
+                seed=int(body.get("seed", 0)),
+                sampling_params=body.get("sampling_params", {}),
+                query=search_term,
+            ),
+            response=DendriteResponseEvent(
+                uids=[uid],
+                stream_results=[
+                    SynapseStreamResult(accumulated_chunks=[chunk for chunk in chunks if chunk is not None])
+                ],
+                timeout=shared_settings.NEURON_TIMEOUT,
+            ),
+            dataset_entry=DDGDatasetEntry(search_term=search_term),
+            block=shared_settings.METAGRAPH.block,
+            step=-1,
+            task_id=str(uuid.uuid4()),
+        )
+    logger.info("Organic task appended to scoring queue")
prompting/datasets/random_website.py (4 changes: 2 additions & 2 deletions)

@@ -15,8 +15,8 @@
 
 class DDGDatasetEntry(DatasetEntry):
     search_term: str
-    website_url: str
-    website_content: str
+    website_url: str = None
+    website_content: str = None
 
 
 class DDGDataset(BaseDataset):
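With the two fields now defaulting to None, the scoring endpoint can construct an entry from a search term alone (see dataset_entry=DDGDatasetEntry(search_term=search_term) above). A small illustration, substituting a plain pydantic model for the repo's DatasetEntry base:

from pydantic import BaseModel


class DDGDatasetEntry(BaseModel):  # the real class extends shared.base.DatasetEntry
    search_term: str
    website_url: str | None = None      # the diff uses `str = None`; `str | None` makes the optionality explicit
    website_content: str | None = None


entry = DDGDatasetEntry(search_term="bittensor subnet 1")  # now valid without url/content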
prompting/settings.py (134 changes: 0 additions & 134 deletions)

This file was deleted.
prompting/weight_setting/weight_setter.py (2 changes: 1 addition & 1 deletion)

@@ -102,7 +102,7 @@ def set_weights(
                 "weights": processed_weights.flatten(),
                 "raw_weights": str(list(weights.flatten())),
                 "averaged_weights": str(list(averaged_weights.flatten())),
-                "block": ttl_get_block(),
+                "block": ttl_get_block(subtensor=subtensor),
             }
         )
         step_filename = "weights.csv"
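The lone change here passes the subtensor explicitly instead of letting ttl_get_block resolve one internally, which matters once callers can live in separately spawned processes. The helper itself is not part of this diff; a hypothetical sketch of a TTL-cached getter of this shape:

from cachetools import TTLCache, cached


@cached(cache=TTLCache(maxsize=1, ttl=12))  # ~one block time (12 s) on Bittensor
def ttl_get_block(subtensor) -> int:
    # Caching avoids re-querying the chain when several callers ask for the
    # current block within the same block window.
    return subtensor.get_current_block()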