refactor pkg/datamodel --> pkg/schema
dave-gray101 committed Jan 4, 2024
1 parent 9792f48 commit 9ebd62c
Showing 39 changed files with 263 additions and 263 deletions.
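
The rename is mechanical: the import path github.com/go-skynet/LocalAI/pkg/datamodel becomes github.com/go-skynet/LocalAI/pkg/schema, and every datamodel. qualifier becomes schema. in the callers. Below is a minimal sketch of a consumer after this commit, assuming only the WhisperResult and WhisperSegment field names visible in the transcript.go diff that follows; the standalone program itself is hypothetical and not part of the commit.

package main

import (
	"fmt"

	// Before this commit the import path was "github.com/go-skynet/LocalAI/pkg/datamodel".
	"github.com/go-skynet/LocalAI/pkg/schema"
)

func main() {
	// Field names (Segments, Text, Id) come from the transcript.go diff below;
	// the values here are placeholders.
	res := schema.WhisperResult{}
	res.Segments = append(res.Segments, schema.WhisperSegment{Id: 0, Text: "hello"})
	res.Text += "hello"
	fmt.Println(len(res.Segments), res.Text)
}

In the hunks below, each line referencing pkg/datamodel is the removed form and the adjacent pkg/schema line is its replacement; the +/- diff markers were lost in this text rendering.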
10 changes: 5 additions & 5 deletions backend/go/transcribe/transcript.go
@@ -8,7 +8,7 @@ import (

"github.com/ggerganov/whisper.cpp/bindings/go/pkg/whisper"
"github.com/go-audio/wav"
"github.com/go-skynet/LocalAI/pkg/datamodel"
"github.com/go-skynet/LocalAI/pkg/schema"
)

func sh(c string) (string, error) {
@@ -29,8 +29,8 @@ func audioToWav(src, dst string) error {
return nil
}

func Transcript(model whisper.Model, audiopath, language string, threads uint) (datamodel.WhisperResult, error) {
res := datamodel.WhisperResult{}
func Transcript(model whisper.Model, audiopath, language string, threads uint) (schema.WhisperResult, error) {
res := schema.WhisperResult{}

dir, err := os.MkdirTemp("", "whisper")
if err != nil {
@@ -89,8 +89,8 @@ func Transcript(model whisper.Model, audiopath, language string, threads uint) (
for _, t := range s.Tokens {
tokens = append(tokens, t.Id)
}

segment := datamodel.WhisperSegment{Id: s.Num, Text: s.Text, Start: s.Start, End: s.End, Tokens: tokens}
segment := schema.WhisperSegment{Id: s.Num, Text: s.Text, Start: s.Start, End: s.End, Tokens: tokens}
res.Segments = append(res.Segments, segment)

res.Text += s.Text
4 changes: 2 additions & 2 deletions backend/go/transcribe/whisper.go
@@ -4,9 +4,9 @@ package main
// It is meant to be used by the main executable that is the server for the specific backend type (falcon, gpt3, etc)
import (
"github.com/ggerganov/whisper.cpp/bindings/go/pkg/whisper"
"github.com/go-skynet/LocalAI/pkg/datamodel"
"github.com/go-skynet/LocalAI/pkg/grpc/base"
pb "github.com/go-skynet/LocalAI/pkg/grpc/proto"
"github.com/go-skynet/LocalAI/pkg/schema"
)

type Whisper struct {
@@ -21,6 +21,6 @@ func (sd *Whisper) Load(opts *pb.ModelOptions) error {
return err
}

func (sd *Whisper) AudioTranscription(opts *pb.TranscriptRequest) (datamodel.WhisperResult, error) {
func (sd *Whisper) AudioTranscription(opts *pb.TranscriptRequest) (schema.WhisperResult, error) {
return Transcript(sd.whisper, opts.Dst, opts.Language, uint(opts.Threads))
}
14 changes: 7 additions & 7 deletions core/backend/embeddings.go
@@ -5,14 +5,14 @@ import (
"time"

"github.com/go-skynet/LocalAI/core/services"
"github.com/go-skynet/LocalAI/pkg/datamodel"
"github.com/go-skynet/LocalAI/pkg/grpc"
"github.com/go-skynet/LocalAI/pkg/model"
"github.com/go-skynet/LocalAI/pkg/schema"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
)

func ModelEmbedding(s string, tokens []int, loader *model.ModelLoader, c datamodel.Config, o *datamodel.StartupOptions) (func() ([]float32, error), error) {
func ModelEmbedding(s string, tokens []int, loader *model.ModelLoader, c schema.Config, o *schema.StartupOptions) (func() ([]float32, error), error) {
if !c.Embeddings {
return nil, fmt.Errorf("endpoint disabled for this model by API configuration")
}
@@ -95,14 +95,14 @@ func ModelEmbedding(s string, tokens []int, loader *model.ModelLoader, c datamod
}, nil
}

func EmbeddingOpenAIRequest(modelName string, input *datamodel.OpenAIRequest, cl *services.ConfigLoader, ml *model.ModelLoader, startupOptions *datamodel.StartupOptions) (*datamodel.OpenAIResponse, error) {
func EmbeddingOpenAIRequest(modelName string, input *schema.OpenAIRequest, cl *services.ConfigLoader, ml *model.ModelLoader, startupOptions *schema.StartupOptions) (*schema.OpenAIResponse, error) {
config, input, err := ReadConfigFromFileAndCombineWithOpenAIRequest(modelName, input, cl, startupOptions)
if err != nil {
return nil, fmt.Errorf("failed reading parameters from request:%w", err)
}

log.Debug().Msgf("Parameter Config: %+v", config)
items := []datamodel.Item{}
items := []schema.Item{}

for i, s := range config.InputToken {
// get the model function to call for the result
@@ -115,7 +115,7 @@ func EmbeddingOpenAIRequest(modelName string, input *datamodel.OpenAIRequest, cl
if err != nil {
return nil, err
}
items = append(items, datamodel.Item{Embedding: embeddings, Index: i, Object: "embedding"})
items = append(items, schema.Item{Embedding: embeddings, Index: i, Object: "embedding"})
}

for i, s := range config.InputStrings {
@@ -129,12 +129,12 @@ func EmbeddingOpenAIRequest(modelName string, input *datamodel.OpenAIRequest, cl
if err != nil {
return nil, err
}
items = append(items, datamodel.Item{Embedding: embeddings, Index: i, Object: "embedding"})
items = append(items, schema.Item{Embedding: embeddings, Index: i, Object: "embedding"})
}

id := uuid.New().String()
created := int(time.Now().Unix())
return &datamodel.OpenAIResponse{
return &schema.OpenAIResponse{
ID: id,
Created: created,
Model: input.Model, // we have to return what the user sent here, due to OpenAI spec.
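
For orientation, here is a sketch of the kind of response value EmbeddingOpenAIRequest assembles once the types live in pkg/schema. The ID, Created, and Model fields appear in this hunk; the Data field and the Item fields mirror the image.go diff below. The model name and embedding numbers are invented, and the program is illustrative only, not code from this commit.

package main

import (
	"fmt"
	"time"

	"github.com/go-skynet/LocalAI/pkg/schema"
	"github.com/google/uuid"
)

func main() {
	// Placeholder values; field names are taken from the hunks in this commit.
	resp := &schema.OpenAIResponse{
		ID:      uuid.New().String(),
		Created: int(time.Now().Unix()),
		Model:   "bert-embeddings",
		Data: []schema.Item{
			{Embedding: []float32{0.12, -0.03, 0.88}, Index: 0, Object: "embedding"},
		},
	}
	fmt.Printf("%+v\n", resp)
}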
12 changes: 6 additions & 6 deletions core/backend/image.go
@@ -11,15 +11,15 @@ import (
"time"

"github.com/go-skynet/LocalAI/core/services"
"github.com/go-skynet/LocalAI/pkg/datamodel"
"github.com/go-skynet/LocalAI/pkg/grpc/proto"
"github.com/go-skynet/LocalAI/pkg/model"
"github.com/go-skynet/LocalAI/pkg/schema"
"github.com/go-skynet/LocalAI/pkg/utils"
"github.com/google/uuid"
"github.com/rs/zerolog/log"
)

func ImageGeneration(height, width, mode, step, seed int, positive_prompt, negative_prompt, src, dst string, loader *model.ModelLoader, c datamodel.Config, o *datamodel.StartupOptions) (func() error, error) {
func ImageGeneration(height, width, mode, step, seed int, positive_prompt, negative_prompt, src, dst string, loader *model.ModelLoader, c schema.Config, o *schema.StartupOptions) (func() error, error) {

opts := modelOpts(c, o, []model.Option{
model.WithBackendString(c.Backend),
@@ -73,7 +73,7 @@ func ImageGeneration(height, width, mode, step, seed int, positive_prompt, negat
return fn, nil
}

func ImageGenerationOpenAIRequest(modelName string, input *datamodel.OpenAIRequest, cl *services.ConfigLoader, ml *model.ModelLoader, startupOptions *datamodel.StartupOptions) (*datamodel.OpenAIResponse, error) {
func ImageGenerationOpenAIRequest(modelName string, input *schema.OpenAIRequest, cl *services.ConfigLoader, ml *model.ModelLoader, startupOptions *schema.StartupOptions) (*schema.OpenAIResponse, error) {
id := uuid.New().String()
created := int(time.Now().Unix())

@@ -131,7 +131,7 @@ func ImageGenerationOpenAIRequest(modelName string, input *datamodel.OpenAIReque
b64JSON = true
}
// src and clip_skip
var result []datamodel.Item
var result []schema.Item
for _, i := range config.PromptStrings {
n := input.N
if input.N == 0 {
@@ -184,7 +184,7 @@ func ImageGenerationOpenAIRequest(modelName string, input *datamodel.OpenAIReque
return nil, err
}

item := &datamodel.Item{}
item := &schema.Item{}

if b64JSON {
defer os.RemoveAll(output)
@@ -202,7 +202,7 @@ func ImageGenerationOpenAIRequest(modelName string, input *datamodel.OpenAIReque
}
}

return &datamodel.OpenAIResponse{
return &schema.OpenAIResponse{
ID: id,
Created: created,
Data: result,
(The remaining 35 changed files are not shown here.)
