Merge pull request #65 from CommandDash/rel-0.0.7
Rel 0.0.7
samyakkkk authored May 15, 2024
2 parents 2c895e0 + 4f89cbd commit 6403cb9
Showing 10 changed files with 115 additions and 33 deletions.
5 changes: 3 additions & 2 deletions commanddash/lib/agent/output_model.dart
@@ -39,7 +39,8 @@ abstract class Output {

class MultiCodeOutput extends Output {
List<WorkspaceFile>? value;
MultiCodeOutput([this.value]) : super(OutputType.multiCodeOutput, 6000 * 2.7);
MultiCodeOutput([this.value])
: super(OutputType.multiCodeOutput, 10000 * 2.7);

@override
String toString() {
@@ -106,7 +107,7 @@ class DataSourceResultOutput extends Output {
// TODO: limit for each output

DataSourceResultOutput([this.value])
: super(OutputType.dataSourceOuput, 6000 * 2.7);
: super(OutputType.dataSourceOuput, 15000 * 2.7);

@override
String toString() {
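
The raised budgets in this file (10000 * 2.7 and 15000 * 2.7 characters, up from 6000 * 2.7) read like token counts scaled by roughly 2.7 characters per token. As a standalone illustration of that idea (not the project's actual Output base class), a budget of this shape could be declared and applied as follows:

// Illustrative only: a character budget derived from a token budget,
// assuming roughly 2.7 characters per token. Names are not from the
// CommandDash codebase.
const double charsPerToken = 2.7;

class BudgetedOutput {
  BudgetedOutput(this.tokenBudget)
      : characterBudget = tokenBudget * charsPerToken;

  final int tokenBudget;
  final double characterBudget;

  /// Trims [content] so it stays within the character budget.
  String fit(String content) {
    final limit = characterBudget.floor();
    return content.length <= limit ? content : content.substring(0, limit);
  }
}

void main() {
  final output = BudgetedOutput(10000); // mirrors the 10000 * 2.7 budget above
  print(output.fit('very long contextual code goes here...'));
}
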
21 changes: 13 additions & 8 deletions commanddash/lib/models/workspace_file.dart
@@ -20,14 +20,19 @@ class WorkspaceFile {
}
final contentLines = File(path).readAsLinesSync();
final codeHash = computeCodeHash(contentLines.join('\n'));
selectedRanges = selectedRanges ??
[
Range(
start: Position(line: 0, character: 0),
end: Position(
line: contentLines.length - 1,
character: contentLines.last.length - 1))
];
if (contentLines.isEmpty) {
selectedRanges = [];
} else {
selectedRanges = selectedRanges ??
[
Range(
start: Position(line: 0, character: 0),
end: Position(
line: contentLines.length - 1,
character: contentLines.last.length - 1))
];
}

return WorkspaceFile(path,
contentLines: contentLines,
codeHash: codeHash,
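
The new branch covers empty files: the old default whole-file range evaluated contentLines.last, which throws a StateError on an empty list, and would have produced an end line of -1. A minimal sketch of that failure mode, separate from the project's WorkspaceFile class:

// Standalone illustration of the failure mode the empty-file branch avoids.
void main() {
  final contentLines = <String>[]; // an empty file read line by line

  // Without the guard, building the default whole-file range touches `.last`
  // and throws before any range can be constructed.
  try {
    print(contentLines.last);
  } on StateError catch (e) {
    print('empty file would have crashed range creation: $e');
  }

  // With the guard, an empty file simply gets an empty list of ranges.
  final selectedRanges =
      contentLines.isEmpty ? <String>[] : ['a whole-file range'];
  print('selectedRanges: $selectedRanges');
}
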
58 changes: 46 additions & 12 deletions commanddash/lib/repositories/gemini_repository.dart
@@ -11,16 +11,26 @@ class UnknownException implements Exception {

class GeminiRepository implements GenerationRepository {
final String apiKey;
@override
double characterLimit = 100000 * 2.7;

GeminiRepository(this.apiKey);
@override
Future<String> getCompletion(
String messages,
) async {
// For text-only input, use the gemini-pro model
final model = GenerativeModel(model: 'gemini-pro', apiKey: apiKey);
final content = [Content.text(messages)];
final response = await model.generateContent(content);
if (response.text != null) {
late final GenerateContentResponse? response;
try {
response = await _getGeminiFlashCompletionResponse(
'gemini-1.5-flash-latest', messages);
} on ServerException catch (e) {
if (e.message.contains(
'found for API version v1beta, or is not supported for GenerateContent')) {
response =
await _getGeminiFlashCompletionResponse('gemini-pro', messages);
}
}
if (response != null && response.text != null) {
return response.text!;
} else {
throw ModelException("No response received from gemini");
@@ -150,7 +160,36 @@ class GeminiRepository implements GenerationRepository {
@override
Future<String> getChatCompletion(
List<ChatMessage> messages, String lastMessage) async {
final model = GenerativeModel(model: 'gemini-pro', apiKey: apiKey);
late final GenerateContentResponse? response;

try {
response = await _getGeminiFlashChatCompletionResponse(
'gemini-1.5-flash-latest', messages, lastMessage);
} on ServerException catch (e) {
if (e.message.contains(
'found for API version v1beta, or is not supported for GenerateContent')) {
response = await _getGeminiFlashChatCompletionResponse(
'gemini-pro', messages, lastMessage);
}
}

if (response != null && response.text != null) {
return response.text!;
} else {
throw ModelException("No response received from gemini");
}
}

Future<GenerateContentResponse> _getGeminiFlashCompletionResponse(
String modelCode, String messages) async {
final model = GenerativeModel(model: modelCode, apiKey: apiKey);
final content = [Content.text(messages)];
return model.generateContent(content);
}

Future<GenerateContentResponse> _getGeminiFlashChatCompletionResponse(
String modelCode, List<ChatMessage> messages, String lastMessage) async {
final model = GenerativeModel(model: modelCode, apiKey: apiKey);
final Content content = Content.text(lastMessage);
final history = messages.map((e) {
if (e.role == ChatRole.user) {
@@ -161,11 +200,6 @@
}).toList();

final chat = model.startChat(history: history);
var response = await chat.sendMessage(content);
if (response.text != null) {
return response.text!;
} else {
throw ModelException("No response received from gemini");
}
return chat.sendMessage(content);
}
}
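
Both completion paths now try gemini-1.5-flash-latest first and drop back to gemini-pro only when the server reports that the model is unavailable for GenerateContent on the v1beta API. The same try-primary-then-fall-back shape, reduced to a self-contained sketch with hypothetical names and no dependency on the google_generative_ai package:

// Hypothetical helper: run against the preferred model, fall back when the
// server error matches, rethrow everything else.
class ServerException implements Exception {
  ServerException(this.message);
  final String message;
}

Future<T> withModelFallback<T>({
  required Future<T> Function(String model) run,
  required String primary,
  required String fallback,
  required bool Function(ServerException) shouldFallBack,
}) async {
  try {
    return await run(primary);
  } on ServerException catch (e) {
    if (shouldFallBack(e)) {
      return run(fallback);
    }
    rethrow; // unrelated server failures stay visible
  }
}

Future<void> main() async {
  final text = await withModelFallback<String>(
    run: (model) async {
      if (model == 'gemini-1.5-flash-latest') {
        throw ServerException(
            'model not found for API version v1beta, or is not supported for GenerateContent');
      }
      return 'response from $model';
    },
    primary: 'gemini-1.5-flash-latest',
    fallback: 'gemini-pro',
    shouldFallBack: (e) => e.message.contains(
        'found for API version v1beta, or is not supported for GenerateContent'),
  );
  print(text); // response from gemini-pro
}

The rethrow is one way to keep errors other than the missing-model case from silently leaving the response unset; the sketch does not claim this is how the repository handles them.
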
1 change: 1 addition & 0 deletions commanddash/lib/repositories/generation_repository.dart
@@ -5,6 +5,7 @@ part 'generation_exceptions.dart';

//Can be implemented to provide generations.
abstract class GenerationRepository {
late double characterLimit;
Future<String> getCompletion(String message);
Future<String> getChatCompletion(
List<ChatMessage> messages, String lastMessage);
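
GenerationRepository now exposes a characterLimit that each backend overrides (GeminiRepository sets 100000 * 2.7 above), so prompt assembly can size its context against the active model instead of a hard-coded constant. A rough sketch of a caller relying on it, using hypothetical names and a getter in place of the late field:

// Hypothetical sketch of consuming a repository-provided character limit.
abstract class GenerationRepositorySketch {
  double get characterLimit;
}

class FakeRepository implements GenerationRepositorySketch {
  @override
  double get characterLimit => 100000 * 2.7; // mirrors the Gemini value above
}

String buildPrompt(
    GenerationRepositorySketch repo, String query, String context) {
  final available = (repo.characterLimit - query.length).floor();
  final trimmedContext =
      context.length <= available ? context : context.substring(0, available);
  return '$trimmedContext\n$query';
}

void main() {
  print(buildPrompt(FakeRepository(), 'Explain this file', 'contextual code...'));
}

The sketch uses a getter; the commit itself declares a late field, which subclasses such as GeminiRepository assign directly.
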
2 changes: 1 addition & 1 deletion commanddash/lib/runner.dart
@@ -33,7 +33,7 @@ class VersionCommand extends Command {
@override
void run() {
/// It is not possible to fetch the version from pubspec.yaml, hence it is assigned manually
print('0.0.6');
print('0.0.7');
}
}

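
The version string is printed from a literal because the compiled CLI cannot read its own pubspec.yaml at runtime. A hypothetical way to confine that duplication to a single line per release is a shared constant:

// Hypothetical sketch: hoist the hand-maintained version into one constant.
const String kCommandDashVersion = '0.0.7';

class VersionCommandSketch {
  // The binary cannot read pubspec.yaml at runtime, so this constant must be
  // bumped together with pubspec.yaml on every release.
  void run() => print(kCommandDashVersion);
}

void main() => VersionCommandSketch().run();
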
@@ -73,6 +73,10 @@ class EmbeddingGenerator {
calculateCosineSimilarity(queryEmbeddings, b.embedding!);
return distanceB.compareTo(distanceA);
}));
return files.sublist(0, 3);
if (files.length > 3) {
return files.sublist(0, 3);
} else {
return files;
}
}
}
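
files.sublist(0, 3) throws a RangeError whenever fewer than three files survive the similarity sort; the new branch returns the whole list in that case. The same top-k selection as a dependency-free sketch (a hypothetical helper, not the project's EmbeddingGenerator):

// Hypothetical top-k helper: sort by a score and take at most k items,
// which is the behaviour the guarded sublist above is aiming for.
List<T> topK<T>(List<T> items, int k, double Function(T) score) {
  final sorted = List<T>.from(items)
    ..sort((a, b) => score(b).compareTo(score(a))); // descending by score
  return sorted.length > k ? sorted.sublist(0, k) : sorted;
}

void main() {
  final similarities = {'a.dart': 0.91, 'b.dart': 0.42};
  // Only two candidates: topK must not throw the way sublist(0, 3) would.
  print(topK(similarities.keys.toList(), 3, (f) => similarities[f]!));
}

Dart's take(3).toList() would cap the length equally well without the explicit branch.
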
15 changes: 9 additions & 6 deletions commanddash/lib/steps/prompt_query/prompt_query_step.dart
@@ -55,7 +55,7 @@ class PromptQueryStep extends Step {
String prompt = query;
int promptLength = prompt.length;

double availableToken = (24000 * 2.7) -
double availableToken = generationRepository.characterLimit -
promptLength; // The max limit comes from the generation repository
// If there are available tokens, we will add the outputs
if (availableToken <= 0) {
@@ -118,10 +118,12 @@
timeoutKind: TimeoutKind.stretched,
);
final context = data['context'];
final listOfContext = List<Map<String, dynamic>>.from(context);
for (final nestedCode in listOfContext) {
final filePath = nestedCode['filePath'];
appendNestedCodeCount(filePath);
if (context != null) {
final listOfContext = List<Map<String, dynamic>>.from(context);
for (final nestedCode in listOfContext) {
final filePath = nestedCode['filePath'];
appendNestedCodeCount(filePath);
}
}
}
break;
@@ -177,10 +179,11 @@
contextualCode = '$contextualCode\n\n[END OF CONTEXTUAL CODE.]\n\n';
prompt = '$contextualCode$prompt';

final filesInvolved = Set<String>.from(
var filesInvolved = Set<String>.from(
includedInPrompt.map((e) => e.path).toList() +
nestedCodes.keys.toList())
.map((e) => e.split('/').last)
.take(7)
.toList();
await taskAssist.processStep(
kind: 'loader_update',
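
Three defensive tweaks land in this step: the available budget now comes from generationRepository.characterLimit, a missing context payload no longer breaks the List.from cast, and the loader message names at most seven files. The null guard in isolation, as a standalone sketch with made-up helper names:

// Standalone illustration of the null guard around the 'context' payload.
void appendNestedCode(Map<String, dynamic> data, List<String> seenFiles) {
  final context = data['context'];
  if (context != null) {
    // Without the guard, List<Map<String, dynamic>>.from(context) throws a
    // TypeError when context is null.
    final listOfContext = List<Map<String, dynamic>>.from(context);
    for (final nestedCode in listOfContext) {
      seenFiles.add(nestedCode['filePath'] as String);
    }
  }
}

void main() {
  final seen = <String>[];
  appendNestedCode({'context': null}, seen); // no crash, nothing appended
  appendNestedCode({
    'context': [
      {'filePath': 'lib/main.dart'}
    ]
  }, seen);
  print(seen); // [lib/main.dart]
}
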
2 changes: 1 addition & 1 deletion commanddash/pubspec.yaml
@@ -1,6 +1,6 @@
name: commanddash
description: CLI enhancements to Dash-AI
version: 0.0.6
version: 0.0.7
repository: https://github.com/Welltested-AI/commanddash

environment:
@@ -1,3 +1,4 @@
import 'package:commanddash/models/chat_message.dart';
import 'package:commanddash/repositories/gemini_repository.dart';
import 'package:commanddash/repositories/generation_repository.dart';
import 'package:test/test.dart';
@@ -78,4 +79,27 @@ void main() {
throwsA(isA<InvalidApiKeyException>()));
});
});
group('Completion', () {
test('get completion with correct key', () async {
final geminiRepository = GeminiRepository(apiKey!);
final result = await geminiRepository.getCompletion("12345 till 10");

expect(result, isA<String>());
});

test('get chat completion', () async {
final geminiRepository = GeminiRepository(apiKey!);
final messages = [
ChatMessage(
role: ChatRole.user,
message: 'Hello',
),
];
final lastMessage = 'Hello';

final response =
await geminiRepository.getChatCompletion(messages, lastMessage);
expect(response, isA<String>());
});
});
}
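
Both new tests call the live Gemini API, so they only pass with a working key (the apiKey! assertion suggests the file reads it from the environment). A hypothetical way to keep such integration tests green on machines without a key is the skip parameter from package:test:

// Hypothetical pattern for skipping live-API tests when no key is configured.
import 'dart:io';

import 'package:test/test.dart';

void main() {
  // Assumed environment variable name; the real test file may differ.
  final apiKey = Platform.environment['GEMINI_API_KEY'];

  test(
    'get completion with correct key',
    () async {
      // A real run would construct GeminiRepository(apiKey!) and assert on
      // the returned String, as the tests above do.
      expect(apiKey, isNotNull);
    },
    skip: apiKey == null
        ? 'GEMINI_API_KEY not set; skipping live Gemini call'
        : false,
  );
}
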
14 changes: 12 additions & 2 deletions commanddash/test/steps/workspace_query_test.dart
@@ -41,7 +41,7 @@ void main() {
{
"id": "736841542",
"type": "string_input",
"value": "Where is the themeing of the app?"
"value": "What does the operations class do?"
},
{
"id": "736841543",
@@ -103,7 +103,10 @@

messageStreamController
.add(StepResponseMessage(1, 'cache', data: {'value': '{}'}));

result = await queue.next;
expect(result, isA<StepMessage>());
expect(result.id, 1);
expect((result as StepMessage).kind, 'update_cache');
result = await queue.next;
expect(result, isA<StepMessage>());
expect(result.id, 1);
@@ -116,6 +119,13 @@
expect(result, isA<StepMessage>());
expect(result.id, 1);
expect((result as StepMessage).kind, 'loader_update');
expect(result.args['kind'], 'processingFiles');
messageStreamController
.add(StepResponseMessage(1, 'loader_update', data: {}));
result = await queue.next;
expect(result, isA<StepMessage>());
expect(result.id, 1);
expect((result as StepMessage).kind, 'loader_update');
expect(result.args['kind'], 'none');
messageStreamController
.add(StepResponseMessage(1, 'loader_update', data: {}));
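
The test now walks the longer handshake: an update_cache message after the cache response, a processingFiles loader update, and finally the none update that clears the loader. Its queue-driven assertion style, reduced to a standalone sketch (assuming package:async's StreamQueue, with plain strings standing in for StepMessage):

// Reduced sketch of asserting ordered messages from a stream, in the style
// of the test above. Message values are made up for illustration.
import 'dart:async';

import 'package:async/async.dart';
import 'package:test/test.dart';

void main() {
  test('messages arrive in the expected order', () async {
    final controller = StreamController<String>();
    final queue = StreamQueue<String>(controller.stream);

    controller
      ..add('update_cache')
      ..add('loader_update:processingFiles')
      ..add('loader_update:none');

    expect(await queue.next, 'update_cache');
    expect(await queue.next, 'loader_update:processingFiles');
    expect(await queue.next, 'loader_update:none');

    await controller.close();
  });
}
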
