more changes to make import from gemini-cli work

parent aebfe325d6
commit aa2483e255

Makefile (5 lines changed)
```diff
@@ -1,4 +1,4 @@
-all: clean chat.pb.go goimports vet
+all: clean chat.pb.go book.pb.go goimports vet
 
 goimports:
 	goimports -w *.go
@@ -6,6 +6,9 @@ goimports:
 chat.pb.go: chat.proto
 	autogenpb --proto chat.proto
 
+book.pb.go: book.proto
+	autogenpb --proto book.proto
+
 clean:
 	rm -f *.pb.go *.patch
 	-rm -f go.*
```
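The new book.pb.go rule mirrors the existing chat.pb.go one, so a plain `make` now regenerates both bindings through autogenpb. The same step could also be driven from the Go toolchain with go:generate directives; a sketch, assuming autogenpb is on PATH and using a hypothetical gen.go in the package:

```go
// gen.go (hypothetical file): lets `go generate ./...` rebuild the bindings
// with the same autogenpb invocations the Makefile uses.
package chatpb

//go:generate autogenpb --proto chat.proto
//go:generate autogenpb --proto book.proto
```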
```diff
@@ -6,6 +6,7 @@ import (
 	"time"
 
 	"go.wit.com/log"
+	timestamppb "google.golang.org/protobuf/types/known/timestamppb"
 )
 
 // returns true if the pb was added
@@ -23,14 +24,12 @@ func (c *Chat) AddGeminiRequest(fname string, age time.Time, pb *GeminiRequest)
 			return false
 		}
 	}
+	log.Info("not sure if c.Entries == pb for real. need to read Content & Parts")
+	/*
 	log.Info("Adding new ChatEntry for", "/tmp/"+fname)
 	e := new(ChatEntry)
 	e.Ctime = timestamppb.New(age)
 	e.From = Who_USER
 	e.ContentFile = fname
 	e.GeminiRequest = pb
 	c.AppendEntry(e)
+	*/
 	return true
 }
```
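For context, the second hunk leaves the ChatEntry append commented out behind the new log line, but AddGeminiRequest keeps the same signature and still reports whether the request was treated as new. A minimal caller sketch, assuming the generated package imports as chatpb (the module path, file name, and values below are assumptions, not taken from this repo):

```go
package main

import (
	"fmt"
	"time"

	"go.wit.com/lib/protobuf/chatpb" // assumed import path for the generated package
)

func main() {
	c := new(chatpb.Chat)            // the chat the entries hang off of
	req := new(chatpb.GeminiRequest) // normally parsed from a gemini-cli request dump

	// fname is the on-disk request file and age its modification time,
	// mirroring AddGeminiRequest(fname string, age time.Time, pb *GeminiRequest).
	fname := "example-request.json" // hypothetical file name
	added := c.AddGeminiRequest(fname, time.Now(), req)

	// true means the request was not already present; the ChatEntry append
	// itself is still disabled in this commit.
	fmt.Println("recorded as new:", added)
}
```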
book.proto (new file, 24 lines)

```diff
@@ -0,0 +1,24 @@
+syntax = "proto3";
+
+package chatpb;
+
+import "google/protobuf/timestamp.proto";
+import "google/protobuf/struct.proto";
+import "chat.proto";
+
+message Book {
+	string uuid = 1; // `autogenpb:unique` `autogenpb:sort`
+	google.protobuf.Timestamp ctime = 2;
+	string Title = 3;
+	int32 version = 4;
+	Who from = 5;
+	string content = 6;
+	Table table = 7;
+	GeminiRequest GeminiRequest = 8;
+}
+
+message Books { // `autogenpb:marshal` `autogenpb:mutex` `autogenpb:gui`
+	string uuid = 1; // `autogenpb:uuid:8b6409ad-4498-43a6-b09a-7835c00dcb9a`
+	string version = 2; // `autogenpb:version:v0.0.1`
+	repeated Book Books = 3; // THIS MUST BE Chat and then Chats
+}
```
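Once `autogenpb --proto book.proto` has run, the usual protoc-gen-go field mapping would turn this into a chatpb.Book struct (uuid → Uuid, ctime → Ctime, and so on). A small construction sketch under that assumption; the import path, UUID, and title are placeholders:

```go
package main

import (
	"fmt"
	"time"

	timestamppb "google.golang.org/protobuf/types/known/timestamppb"

	"go.wit.com/lib/protobuf/chatpb" // assumed import path for the generated package
)

func main() {
	b := &chatpb.Book{
		Uuid:    "placeholder-uuid", // hypothetical; autogenpb tags this field `unique` and `sort`
		Ctime:   timestamppb.New(time.Now()),
		Title:   "imported gemini-cli session", // hypothetical title
		Version: 1,
		From:    chatpb.Who_USER, // Who comes from chat.proto, imported above
	}
	fmt.Println("new book:", b.Title)
}
```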
chat.proto (24 lines changed)
```diff
@@ -36,14 +36,19 @@ message FunctionDeclaration {
 	Schema parameters_json_schema = 3;
 }
 
+message GoogleSearch {
+}
+
 message Tool {
 	repeated FunctionDeclaration functionDeclarations = 1;
+	GoogleSearch googleSearch = 2;
 }
 
 // Configuration for the request
 message Config {
 	message ThinkingConfig {
 		bool includeThoughts = 1;
+		int32 thinkingBudget = 2;
 	}
 	double temperature = 2;
 	double topP = 3;
@@ -65,9 +70,27 @@ message ResponseJsonSchema {
 	message Properties {
 		Reasoning reasoning = 1;
 		NextSpeaker next_speaker = 2;
+		CorrectedNewStringEscaping corrected_new_string_escaping = 3;
+		CorrectedTargetSnippet corrected_target_snippet = 4;
+		Confidence confidence = 5;
 	}
 
+	message Confidence {
+		string type = 1;
+		string description = 2;
+	}
+
+	message CorrectedTargetSnippet {
+		string type = 1;
+		string description = 2;
+	}
+
 	// Reasoning property
+	message CorrectedNewStringEscaping {
+		string type = 1;
+		string description = 2;
+	}
+
 	message Reasoning {
 		string type = 1;
 		string description = 2;
@@ -116,6 +139,7 @@ message argsInfo {
 	string content = 12;
 	string fact = 13;
 	repeated string paths = 14;
+	string query = 15;
 }
 
 // Function response
```
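These additions track fields that show up in gemini-cli request JSON: an empty googleSearch tool, a thinkingBudget, extra ResponseJsonSchema properties, and a query argument. Assuming standard protoc-gen-go naming in the regenerated chat.pb.go (nested messages become Config_ThinkingConfig, and the import path is a guess), the new pieces would be populated roughly like this:

```go
package main

import (
	"fmt"

	"go.wit.com/lib/protobuf/chatpb" // assumed import path for the generated package
)

func main() {
	// Tool with the new googleSearch member; GoogleSearch is an empty marker
	// message, matching the empty googleSearch object gemini-cli sends.
	tool := &chatpb.Tool{
		GoogleSearch: &chatpb.GoogleSearch{},
	}

	// The new thinkingBudget field lands on the nested ThinkingConfig type.
	thinking := &chatpb.Config_ThinkingConfig{
		IncludeThoughts: true,
		ThinkingBudget:  1024, // hypothetical budget value
	}

	fmt.Println(tool, thinking)
}
```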
```diff
@@ -77,7 +77,7 @@ func (all *Chats) ConfigSave() error {
 		log.Infof("chatpb.ConfigSave() failed len(Chats)=%d bytes=%d", len(cleanChats.Chats), len(data))
 		return err
 	}
-	configWrite("regex.text", []byte(cleanChats.FormatTEXT()))
+	// configWrite("regex.text", []byte(cleanChats.FormatTEXT()))
 	// log.Infof("chatpb.ConfigSave() worked len(Chats)=%d bytes=%d", len(cleanChats.Chats), len(data))
 	return nil
 }
```