Skip to content

Commit

Permalink
Merge pull request #90 from kookmin-sw/jmpark
Browse files Browse the repository at this point in the history
Apply FM (llama, diffusion) deploy api in console (frontend)
  • Loading branch information
j-myeong authored May 19, 2024
2 parents cac87e1 + 0a155b6 commit d217387
Show file tree
Hide file tree
Showing 2 changed files with 129 additions and 17 deletions.
108 changes: 107 additions & 1 deletion frontend/sskai-console/src/api/index.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,12 @@ const INFERENCE_SERVERLESS_API = import.meta.env
.VITE_INFERENCE_SERVERLESS_API_URL;
const MODEL_PROFILE_API = import.meta.env.VITE_MODEL_PROFILE_API_URL;
const USER_TRAIN_API = import.meta.env.VITE_USER_TRAIN_API_URL;
const LLAMA_TRAIN_API = import.meta.env.VITE_LLAMA_TRAIN_API_URL;
const DIFFUSION_TRAIN_API = import.meta.env.VITE_DIFFUSION_TRAIN_API_URL;
const STREAMLIT_API = import.meta.env.VITE_STREAMLIT_API_URL;
const INFERENCE_LLAMA_API = import.meta.env.VITE_INFERENCE_LLAMA_API_URL;
const INFERENCE_DIFFUSION_API = import.meta.env
.VITE_INFERENCE_DIFFUSION_API_URL;

// Model
export const createModel = async (args) => {
Expand Down Expand Up @@ -428,6 +433,108 @@ export const manageStreamlit = async ({
return res.status === 200;
};

/**
 * Creates an inference record in the DB, then deploys a foundation-model
 * (FM) endpoint for it via the family-specific deploy API.
 *
 * @param {('llama'|'diffusion')} type - FM family to deploy.
 * @param {Object} args - Expects: user, name, model, model_type,
 *   type (deployment type stored on the DB record), and model_detail
 *   (payload forwarded to the FM deploy API).
 * @returns {Promise<Object|false>} The created inference DB Item on
 *   success, or false on any failure. If the deploy call fails, the
 *   just-created DB record is rolled back so no orphan remains.
 */
export const createFMInference = async (type, args) => {
  // Map FM family -> deploy endpoint + human-readable log label.
  // Centralizing this removes the previously duplicated llama/diffusion branches.
  const FM_TARGETS = {
    llama: { api: INFERENCE_LLAMA_API, label: 'Llama' },
    diffusion: { api: INFERENCE_DIFFUSION_API, label: 'Diffusion' }
  };
  const target = FM_TARGETS[type];

  // Guard first: an unknown type would otherwise create a DB record with
  // no backing endpoint and return it as if deployment had succeeded.
  if (!target) {
    console.error(`createFMInference: unsupported FM type "${type}"`);
    return false;
  }

  const res = await axios
    .post(`${DB_API}/inferences`, {
      user: args.user,
      name: args.name,
      model: args.model,
      model_type: args.model_type,
      type: args.type
    })
    .catch((err) => err);

  if (!res?.data) {
    console.error(res);
    return false;
  }

  const { Item } = res.data.inference;

  const deploy = await axios
    .post(target.api, {
      uid: Item.uid,
      user: args.user,
      action: 'create',
      model: args.model_detail
    })
    .catch((err) => err);

  // A caught axios error has no top-level `status`, so this also treats
  // network failures as deploy failures.
  if (deploy.status !== 200) {
    // Roll back the DB record so no dangling inference remains.
    await axios.delete(`${DB_API}/inferences/${Item.uid}`);
    return false;
  }

  await createLog({
    user: args.user,
    name: args.name,
    kind_of_job: 'inference',
    job: `Endpoint (${target.label}) Created`
  });

  return Item;
};

/**
 * Requests deletion of a foundation-model (FM) inference endpoint via the
 * family-specific deploy API.
 *
 * NOTE(review): the "Deleted" log entry is written regardless of whether
 * the delete request actually succeeded — preserved from the original
 * behavior; confirm whether logging should be conditional on a 200.
 *
 * @param {('llama'|'diffusion')} type - FM family of the endpoint.
 * @param {Object} args - Expects: uid, user, name.
 * @returns {Promise<boolean>} true when the deploy API responded 200;
 *   false on failure or an unsupported type.
 */
export const deleteFMInference = async (type, args) => {
  // Map FM family -> deploy endpoint + human-readable log label.
  const FM_TARGETS = {
    llama: { api: INFERENCE_LLAMA_API, label: 'Llama' },
    diffusion: { api: INFERENCE_DIFFUSION_API, label: 'Diffusion' }
  };
  const target = FM_TARGETS[type];

  // Previously an unknown type fell through and returned undefined;
  // return false explicitly (still falsy, so callers are unaffected).
  if (!target) return false;

  const res = await axios
    .post(target.api, {
      uid: args.uid,
      user: args.user,
      action: 'delete'
    })
    .catch((err) => err);

  await createLog({
    user: args.user,
    name: args.name,
    kind_of_job: 'inference',
    job: `Endpoint (${target.label}) Deleted`
  });

  // A caught axios error has no top-level `status`, so errors yield false.
  return res.status === 200;
};

// Upload Files (Model / Data)
export const uploadS3 = async (upload_type, user_uid, uid, file) => {
const res = await axios
Expand Down Expand Up @@ -532,7 +639,6 @@ export const uploadS3Multipart = async (upload_type, user_uid, uid, file) => {
};

// Logs

export const getLogs = async (user_uid) => {
const res = await axios
.get(`${DB_API}/logs`, {
Expand Down
38 changes: 22 additions & 16 deletions frontend/sskai-console/src/pages/Inference/index.jsx
Original file line number Diff line number Diff line change
Expand Up @@ -22,8 +22,10 @@ import {
} from 'antd';
import { useEffect, useState } from 'react';
import {
createFMInference,
createServerlessInference,
createSpotInference,
deleteFMInference,
deleteServerlessInference,
deleteSpotInference,
getInferences,
Expand Down Expand Up @@ -234,9 +236,11 @@ export default function Inference(props) {
}
};
const endpoint =
inferenceType === 'Spot'
? await createSpotInference(args)
: await createServerlessInference(args);
inferenceType === 'Serverless'
? await createServerlessInference(args)
: selectedModel.type === 'user'
? await createSpotInference(args)
: await createFMInference(selectedModel.type, args);

setIsCreateLoading(false);
if (!endpoint)
Expand All @@ -256,18 +260,24 @@ export default function Inference(props) {
const target = selectedDetail[0];
if (!target) return;
target?.streamlit_url && (await handleStreamlit('delete'));
target.type === 'Spot'
? await deleteSpotInference({
uid: target.uid,
user: target.user,
name: target.name
})
: await deleteServerlessInference({
target.type === 'Serverless'
? await deleteServerlessInference({
uid: target.uid,
user: target.user,
model: target.model,
name: target.name
});
})
: target.model_type === 'user'
? await deleteSpotInference({
uid: target.uid,
user: target.user,
name: target.name
})
: await deleteFMInference(target.model_type, {
uid: target.uid,
user: target.user,
name: target.name
});
await fetchData();
messageApi.open({
type: 'success',
Expand Down Expand Up @@ -408,11 +418,7 @@ export default function Inference(props) {
<Button
type={'default'}
onClick={() =>
window.open(
selectedDetail[0].streamlit_url,
'_blank',
'rel=noopener noreferrer'
)
window.open(selectedDetail[0].streamlit_url, '_blank')
}
loading={isDeployLoading}
disabled={
Expand Down

0 comments on commit d217387

Please sign in to comment.